├── .gitignore
├── .readthedocs.yaml
├── LICENSE
├── PySimpleAutomata
│   ├── AFW.py
│   ├── DFA.py
│   ├── NFA.py
│   ├── __init__.py
│   └── automata_IO.py
├── README.rst
├── doc
│   ├── Makefile
│   ├── README.rst
│   ├── make.bat
│   └── source
│       ├── AFW.rst
│       ├── DFA.rst
│       ├── IO.rst
│       ├── NFA.rst
│       ├── _static
│       │   ├── accepting_node.png
│       │   ├── dfa_example.png
│       │   ├── nfa_example.png
│       │   ├── nodes.png
│       │   ├── root_node.png
│       │   └── transition.png
│       ├── automata_IO.rst
│       ├── automata_representation.rst
│       ├── conf.py
│       ├── index.rst
│       ├── installation.rst
│       ├── modules.rst
│       ├── test_AFW.rst
│       ├── test_DFA.rst
│       ├── test_NFA.rst
│       ├── test_automata_IO.rst
│       ├── tutorial.rst
│       ├── unittest.rst
│       └── usage.rst
├── requirements.txt
├── setup.py
└── tests
    ├── __init__.py
    ├── context.py
    ├── dot
    │   ├── afw
    │   │   ├── nfa_afw_to_nfa_test_01.dot
    │   │   └── nfa_nfa_to_afw_test_01.dot
    │   ├── automata_io
    │   │   ├── automata_io_dfa_imported_intersection.dot
    │   │   ├── automata_io_dfa_importing_intersection.dot
    │   │   ├── automata_io_dfa_importing_no_state.dot
    │   │   ├── automata_io_nfa_dot_importer_test_01.dot
    │   │   ├── automata_io_nfa_imported_intersection.dot
    │   │   └── automata_io_nfa_importer_pydot_nfa_simple.dot
    │   ├── dfa
    │   │   ├── dfa_co_reachable_test_01.dot
    │   │   ├── dfa_co_reachable_test_02.dot
    │   │   ├── dfa_co_reachable_test_02_co_reachable.dot
    │   │   ├── dfa_co_reachable_test_03.dot
    │   │   ├── dfa_co_reachable_test_04.dot
    │   │   ├── dfa_co_reachable_test_05.dot
    │   │   ├── dfa_co_reachable_test_06.dot
    │   │   ├── dfa_complementation_test_01.dot
    │   │   ├── dfa_complementation_test_01_complemented.dot
    │   │   ├── dfa_complementation_test_02.dot
    │   │   ├── dfa_complementation_test_02_complemented.dot
    │   │   ├── dfa_complementation_test_03.dot
    │   │   ├── dfa_complementation_test_03_complemented.dot
    │   │   ├── dfa_completion_test_01.dot
    │   │   ├── dfa_completion_test_01_completed.dot
    │   │   ├── dfa_completion_test_02.dot
    │   │   ├── dfa_completion_test_02_completed.dot
    │   │   ├── dfa_completion_test_03.dot
    │   │   ├── dfa_completion_test_03_completed.dot
    │   │   ├── dfa_intersection_1_test_01.dot
    │   │   ├── dfa_intersection_1_test_02.dot
    │   │   ├── dfa_intersection_2_test_01.dot
    │   │   ├── dfa_intersection_2_test_02.dot
    │   │   ├── dfa_minimization_test_01.dot
    │   │   ├── dfa_minimization_test_01_s4.dot
    │   │   ├── dfa_minimization_test_02.dot
    │   │   ├── dfa_minimization_test_03.dot
    │   │   ├── dfa_minimization_test_04.dot
    │   │   ├── dfa_nonemptiness_check_test_01.dot
    │   │   ├── dfa_nonemptiness_check_test_02.dot
    │   │   ├── dfa_projection_test_01.dot
    │   │   ├── dfa_projection_test_02.dot
    │   │   ├── dfa_reachable_test_01.dot
    │   │   ├── dfa_reachable_test_02.dot
    │   │   ├── dfa_reachable_test_02_reachable.dot
    │   │   ├── dfa_reachable_test_03.dot
    │   │   ├── dfa_reachable_test_04.dot
    │   │   ├── dfa_reachable_test_05.dot
    │   │   ├── dfa_renaming_test_01.dot
    │   │   ├── dfa_run_acceptance_test_01.dot
    │   │   ├── dfa_run_acceptance_test_02.dot
    │   │   ├── dfa_run_test_01.dot
    │   │   ├── dfa_run_test_02.dot
    │   │   ├── dfa_trimming_test_01.dot
    │   │   ├── dfa_trimming_test_02.dot
    │   │   ├── dfa_trimming_test_03.dot
    │   │   ├── dfa_trimming_test_04.dot
    │   │   ├── dfa_union_1_test_01.dot
    │   │   ├── dfa_union_1_test_02.dot
    │   │   ├── dfa_union_2_test_01.dot
    │   │   ├── dfa_union_2_test_02.dot
    │   │   ├── dfa_word_acceptance_test_01.dot
    │   │   └── dfa_word_acceptance_test_02.dot
    │   └── nfa
    │       ├── nfa_complementation_test_01.dot
    │       ├── nfa_determinization_test_01.dot
    │       ├── nfa_determinization_test_02.dot
    │       ├── nfa_interestingness_test_01.dot
    │       ├── nfa_interestingness_test_02.dot
    │       ├── nfa_intersection_1_test_01.dot
    │       ├── nfa_intersection_2_test_01.dot
    │       ├── nfa_nonemptiness_test_01.dot
    │       ├── nfa_nonemptiness_test_02.dot
    │       ├── nfa_nonuniversality_test_01.dot
    │       ├── nfa_nonuniversality_test_02.dot
    │       ├── nfa_renaming_test_01.dot
    │       ├── nfa_run_acceptance_test_01.dot
    │       ├── nfa_union_1_test_01.dot
    │       ├── nfa_union_2_test_01.dot
    │       ├── nfa_union_3_test_01.dot
    │       └── nfa_word_acceptance_test_01.dot
    ├── json
    │   ├── afw
    │   │   ├── afw_afw_to_nfa_test_01.json
    │   │   ├── afw_complementation_test_01.json
    │   │   ├── afw_completion_test_01.json
    │   │   ├── afw_intersection_1_test_01.json
    │   │   ├── afw_intersection_2_test_01.json
    │   │   ├── afw_intersection_3_test_01.json
    │   │   ├── afw_nfa_to_afw_test_01.json
    │   │   ├── afw_nonemptiness_check_test_1.json
    │   │   ├── afw_nonemptiness_check_test_2.json
    │   │   ├── afw_nonuniversality_check_test_1.json
    │   │   ├── afw_nonuniversality_check_test_2.json
    │   │   ├── afw_union_1_test_01.json
    │   │   ├── afw_union_2_test_01.json
    │   │   ├── afw_union_3_test_01.json
    │   │   └── afw_word_acceptance_test_01.json
    │   ├── automata_io
    │   │   ├── afw_json_importer_1.json
    │   │   └── automata_io_afw_json_importer_test_01.json
    │   ├── dfa
    │   │   ├── dfa_export_to_json_1.json
    │   │   └── dfa_json_importer_01.json
    │   └── nfa
    │       └── nfa_json_importer_1.json
    ├── test_AFW.py
    ├── test_DFA.py
    ├── test_NFA.py
    └── test_automata_IO.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # PyCharm IDE conf file
2 | /.idea/*
3 |
4 | # Sphinx Documentation
5 | /doc/_build/
6 | /doc/build/
7 |
8 | # Packaging and distribution
9 | /PySimpleAutomata.egg-info/
10 | /build/
11 | /dist/
12 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Set the version of Python and other tools you might need
9 | build:
10 | os: ubuntu-22.04
11 | tools:
12 | python: "3.7"
13 |
14 | # Build documentation in the docs/ directory with Sphinx
15 | sphinx:
16 | configuration: doc/source/conf.py
17 |
18 | # We recommend specifying your dependencies to enable reproducible builds:
19 | # https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
20 | # python:
21 | # install:
22 | # - requirements: docs/requirements.txt
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Alessio Cecconi
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/PySimpleAutomata/AFW.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to manage AFW (Alternating Finite automaton on Words).
3 |
4 | Formally an AFW (Alternating Finite automaton on Words) is a tuple
5 | :math:`(Σ, S, s0, ρ, F )`, where:
6 |
7 | • Σ is a finite nonempty alphabet;
8 | • S is a finite nonempty set of states;
9 | • :math:`s0 ∈ S` is the initial state (notice that, as in DFAs,
10 | we have a unique initial state);
11 | • F ⊆ S is the set of accepting states;
12 | • :math:`ρ : S × Σ → B+(S)` is a transition function, where
13 | :math:`B+(X)` is the set of positive Boolean formulas
14 | over a given set X,
15 | e.g. :math:`ρ(s, a) = (s1 ∧ s2) ∨ (s3 ∧ s4)`
16 |
17 | In this module an AFW is defined as follows
18 |
19 | AFW = dict() with the following keys-values:
20 |
21 | • alphabet => set()
22 | • states => set()
23 | • initial_state => 'state_0'
24 | • accepting_states => set()
25 | • transitions => dict(), where
26 | **key** (state ∈ states, action ∈ alphabet)
27 |
28 | **value** [string representing a PYTHON boolean expression
29 | over states; where we also allow the formulas
30 | *True* and *False*]
31 | """
32 |
33 | from PySimpleAutomata import NFA
34 | import itertools
35 | import re
36 | from copy import deepcopy
37 |
38 |
39 | def __recursive_acceptance(afw, state, remaining_word):
40 | """ Recursive call for word acceptance.
41 |
42 | :param dict afw: input AFW;
43 | :param str state: current state;
44 | :param list remaining_word: list containing the remaining
45 | symbols of the word.
46 | :return: *(bool)*, True if the word is accepted, False
47 | otherwise.
48 | """
49 | # the word is accepted only if all the final states are
50 | # accepting states
51 | if len(remaining_word) == 0:
52 | if state in afw['accepting_states']:
53 | return True
54 | else:
55 | return False
56 |
57 | action = remaining_word[0]
58 | if (state, action) not in afw['transitions']:
59 | return False
60 |
61 | if afw['transitions'][state, action] == 'True':
62 | return True
63 | elif afw['transitions'][state, action] == 'False':
64 | return False
65 |
66 | transition = (state, action)
67 | # extract from the boolean formula of the transition the
68 | # states involved in it
69 | involved_states = list(
70 | set(
71 | re.findall(r"[\w']+", afw['transitions'][transition])
72 | ).difference({'and', 'or', 'True', 'False'})
73 | )
74 | possible_assignments = set(
75 | itertools.product([True, False], repeat=len(involved_states)))
76 | # For each possible assignment of the transition (a
77 | # boolean formula over the states)
78 | for assignment in possible_assignments:
79 | mapping = dict(zip(involved_states, assignment))
80 | # If the assignment evaluation is positive
81 | if eval(afw['transitions'][transition], mapping):
82 | ok = True
83 | mapping.pop('__builtins__') # removes useless entry
84 | # added by the function eval()
85 |
86 | # Check if the word is accepted in ALL the states
87 | # mapped to True by the assignment
88 | for mapped_state in mapping:
89 | if mapping[mapped_state] == False:
90 | continue
91 | if not __recursive_acceptance(afw,
92 | mapped_state,
93 | remaining_word[1:]):
94 | # if one positive state of the assignment
95 | # doesn't accept the word, the whole
96 | # assignment is discarded
97 | ok = False
98 | break
99 | if ok:
100 | # If at least one assignment accepts the word,
101 | # the word is accepted by the afw
102 | return True
103 | return False
104 |
105 |
106 | def afw_word_acceptance(afw: dict, word: list) -> bool:
107 | """ Checks if a **word** is accepted by input AFW, returning
108 | True/False.
109 |
110 | The word w is accepted by an AFW if there exists at least one
111 | accepting run on w. A run for AFWs is a tree and
112 | an alternating automaton can have multiple runs on a given
113 | input.
114 | A run is accepting if all the leaf nodes are accepting states.
115 |
116 | :param dict afw: input AFW;
117 | :param list word: list of symbols ∈ afw['alphabet'].
118 | :return: *(bool)*, True if the word is accepted, False otherwise.
119 | """
120 | return __recursive_acceptance(afw, afw['initial_state'], word)
121 |
122 |
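# --- Illustrative sketch (editorial addition, not part of the library) ---
# A minimal usage example of afw_word_acceptance, using the AFW dictionary
# layout documented in the module docstring. The helper name and the toy
# automaton below are made up for illustration only.
def _example_afw_word_acceptance():
    toy_afw = {
        'alphabet': {'a', 'b'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {
            ('s0', 'a'): 's1',            # single successor
            ('s1', 'a'): 's1 or s0',      # disjunctive successor formula
            ('s1', 'b'): 's1 and s0',     # conjunctive successor formula
        },
    }
    return (afw_word_acceptance(toy_afw, ['a']),   # True: the run ends in s1
            afw_word_acceptance(toy_afw, ['b']))   # False: no (s0, 'b') transition
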
123 | # Side effect on input afw
124 | def afw_completion(afw):
125 | """ Side effect on input! Complete the afw adding not
126 | present transitions and marking them as False.
127 |
128 | :param dict afw: input AFW.
129 | """
130 |
131 | for state in afw['states']:
132 | for a in afw['alphabet']:
133 | if (state, a) not in afw['transitions']:
134 | afw['transitions'][state, a] = 'False'
135 | return afw
136 |
137 |
138 | def nfa_to_afw_conversion(nfa: dict) -> dict:
139 | """ Returns a AFW reading the same language of input NFA.
140 |
141 | Let :math:`A = (Σ,S,S^0, ρ,F)` be an nfa. Then we define the
142 | afw AA such that :math:`L(AA) = L(A)` as follows
143 | :math:`AA = (Σ, S ∪ {s_0}, s_0 , ρ_A , F )` where :math:`s_0`
144 | is a new state and :math:`ρ_A` is defined as follows:
145 |
146 | • :math:`ρ_A(s, a)= ⋁_{(s,a,s')∈ρ}s'`, for all :math:`a ∈ Σ`
147 | and :math:`s ∈ S`
148 | • :math:`ρ_A(s^0, a)= ⋁_{s∈S^0,(s,a,s')∈ρ}s'`, for all
149 | :math:`a ∈ Σ`
150 |
151 | We take an empty disjunction in the definition of AA to be
152 | equivalent to false. Essentially,
153 | the transitions of A are viewed as disjunctions in AA . A
154 | special treatment is needed for the
155 | initial state, since we allow a set of initial states in
156 | nondeterministic automata, but only a
157 | single initial state in alternating automata.
158 |
159 | :param dict nfa: input NFA.
160 | :return: *(dict)* representing a AFW.
161 | """
162 | afw = {
163 | 'alphabet': nfa['alphabet'].copy(),
164 | 'states': nfa['states'].copy(),
165 | 'initial_state': 'root',
166 | 'accepting_states': nfa['accepting_states'].copy(),
167 | 'transitions': dict()
168 | }
169 |
170 | # Make sure the "root" node doesn't already exist; rename it if it does
171 | i = 0
172 | while afw['initial_state'] in nfa['states']:
173 | afw['initial_state'] = 'root' + str(i)
174 | i += 1
175 | afw['states'].add(afw['initial_state'])
176 |
177 | for (state, action) in nfa['transitions']:
178 | boolean_formula = str()
179 | for destination in nfa['transitions'][state, action]:
180 | boolean_formula += destination + ' or '
181 | # strip last ' or ' from the formula string
182 | boolean_formula = boolean_formula[0:-4]
183 | afw['transitions'][state, action] = boolean_formula
184 | if state in nfa['initial_states']:
185 | afw['transitions'][afw['initial_state'], action] = boolean_formula
186 |
187 | return afw
188 |
189 |
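# --- Illustrative sketch (editorial addition, not part of the library) ---
# Converting a tiny NFA: each nondeterministic transition becomes a
# disjunction over its target states, and the formula of every initial
# state is copied onto the fresh 'root' state. The toy NFA is made up.
def _example_nfa_to_afw_conversion():
    toy_nfa = {
        'alphabet': {'a'},
        'states': {'q0', 'q1'},
        'initial_states': {'q0'},
        'accepting_states': {'q1'},
        'transitions': {('q0', 'a'): {'q0', 'q1'}},
    }
    afw = nfa_to_afw_conversion(toy_nfa)
    # afw['transitions'][('q0', 'a')] is a disjunction such as 'q0 or q1',
    # and the same formula appears under the new initial state 'root'.
    return afw_word_acceptance(afw, ['a'])  # True, as for the original NFA
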
190 | def afw_to_nfa_conversion(afw: dict) -> dict:
191 | """ Returns a NFA reading the same language of input AFW.
192 |
193 | Let :math:`A = (Σ, S, s^0 , ρ, F )` be an afw. Then we
194 | define the nfa :math:`A_N` such that :math:`L(A_N) = L(A)`
195 | as follows :math:`AN = (Σ, S_N , S^0_N , ρ_N , F_N )` where:
196 |
197 | • :math:`S_N = 2^S`
198 | • :math:`S^0_N= \{\{s^0 \}\}`
199 | • :math:`F_N=2^F`
200 | • :math:`(Q,a,Q') ∈ ρ_N` iff :math:`Q'` satisfies :math:`⋀_{
201 | s∈Q} ρ(s, a)`
202 |
203 | We take an empty conjunction in the definition of
204 | :math:`ρ_N` to be equivalent to true; thus, :math:`(∅, a,
205 | ∅) ∈ ρ_N`.
206 |
207 | :param dict afw: input AFW.
208 | :return: *(dict)* representing a NFA.
209 | """
210 |
211 | nfa = {
212 | 'alphabet': afw['alphabet'].copy(),
213 | 'initial_states': {(afw['initial_state'],)},
214 | 'states': {(afw['initial_state'],)},
215 | 'accepting_states': set(),
216 | 'transitions': dict()
217 | }
218 |
219 | # States of the NFA are composed of sets of states of the AFW
220 |
221 | boundary = deepcopy(nfa['states'])
222 | possible_assignments = set(
223 | itertools.product([True, False], repeat=len(afw['states'])))
224 |
225 | while boundary:
226 | state = boundary.pop()
227 | # The state is accepting only if composed exclusively of final states
228 | if set(state).issubset(afw['accepting_states']):
229 | nfa['accepting_states'].add(state)
230 |
231 | for action in nfa['alphabet']:
232 | boolean_formula = 'True'
233 | # join the boolean formulas of the single states given the action
234 | for s in state:
235 | if (s, action) not in afw['transitions']:
236 | boolean_formula += ' and False'
237 | else:
238 | boolean_formula += \
239 | ' and (' + \
240 | afw['transitions'][s, action] + \
241 | ')'
242 |
243 | for assignment in possible_assignments:
244 | mapping = dict(zip(afw['states'], assignment))
245 |
246 | # If the formula is satisfied
247 | if eval(boolean_formula, mapping):
248 | # add the transition to the resulting NFA
249 |
250 | evaluation = \
251 | tuple(k for k in mapping if mapping[k] is True)
252 |
253 | if evaluation not in nfa['states']:
254 | nfa['states'].add(evaluation)
255 | boundary.add(evaluation)
256 | nfa['transitions'].setdefault(
257 | (state, action), set()).add(evaluation)
258 |
259 | return nfa
260 |
261 |
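# --- Illustrative sketch (editorial addition, not part of the library) ---
# The converse conversion: NFA states are tuples of AFW states (subsets of
# S), so the construction can blow up to 2^|S| states. The toy AFW below
# is made up; the check only shows that the language is preserved.
def _example_afw_to_nfa_conversion():
    toy_afw = {
        'alphabet': {'a'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'a'): 's1'},
    }
    nfa = afw_to_nfa_conversion(toy_afw)
    # The NFA uses states such as ('s0',) and ('s1',); both automata
    # accept the word ['a'].
    return NFA.nfa_word_acceptance(nfa, ['a']) \
        and afw_word_acceptance(toy_afw, ['a'])
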
262 | def formula_dual(input_formula: str) -> str:
263 | """ Returns the dual of the input formula.
264 |
265 | The dual operation on formulas in :math:`B^+(X)` is defined as:
266 | the dual :math:`\overline{θ}` of a formula :math:`θ` is obtained from θ by
267 | switching :math:`∧` and :math:`∨`, and
268 | by switching :math:`true` and :math:`false`.
269 |
270 | :param str input_formula: original string.
271 | :return: *(str)*, dual of input formula.
272 | """
273 | conversion_dictionary = {
274 | 'and': 'or',
275 | 'or': 'and',
276 | 'True': 'False',
277 | 'False': 'True'
278 | }
279 |
280 | return re.sub(
281 | '|'.join(re.escape(key) for key in conversion_dictionary.keys()),
282 | lambda k: conversion_dictionary[k.group(0)], input_formula)
283 |
284 |
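# --- Illustrative sketch (editorial addition, not part of the library) ---
# formula_dual swaps the connectives and the constants of a formula string.
# Note that the substitution is plain substring matching, so state names
# containing 'and', 'or', 'True' or 'False' as substrings would be rewritten
# as well (another reason to avoid such names, as warned elsewhere).
def _example_formula_dual():
    return formula_dual('(s1 and s2) or True')  # -> '(s1 or s2) and False'
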
285 | def afw_complementation(afw: dict) -> dict:
286 | """ Returns a AFW reading the complemented language read by
287 | input AFW.
288 |
289 | Let :math:`A = (Σ, S, s^0 , ρ, F )`. Define :math:`Ā = (Σ, S,
290 | s^0 , \overline{ρ}, S − F )`,
291 | where :math:`\overline{ρ}(s, a) = \overline{ρ(s, a)}` for all
292 | :math:`s ∈ S` and :math:`a ∈ Σ`.
293 | That is, :math:`\overline{ρ}` is the dualized transition
294 | function. It can be shown that :math:`L( Ā) = Σ^∗ − L(A)`.
295 |
296 | The input afw needs to be completed, i.e. each non-existing
297 | transition must be added, pointing to False.
298 |
299 | :param dict afw: input AFW.
300 | :return: *(dict)* representing a AFW.
301 | """
302 | completed_input = afw_completion(deepcopy(afw))
303 |
304 | complemented_afw = {
305 | 'alphabet': completed_input['alphabet'],
306 | 'states': completed_input['states'],
307 | 'initial_state': completed_input['initial_state'],
308 | 'accepting_states':
309 | completed_input['states'].difference(afw['accepting_states']),
310 | 'transitions': dict()
311 | }
312 |
313 | for transition in completed_input['transitions']:
314 | complemented_afw['transitions'][transition] = \
315 | formula_dual(completed_input['transitions'][transition])
316 | return complemented_afw
317 |
318 |
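# --- Illustrative sketch (editorial addition, not part of the library) ---
# Complementing a toy AFW: the dualized transition function flips the
# acceptance of every word, here checked on the single word ['a'].
def _example_afw_complementation():
    toy_afw = {
        'alphabet': {'a'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'a'): 's1'},
    }
    complement = afw_complementation(toy_afw)
    return (afw_word_acceptance(toy_afw, ['a']),      # True
            afw_word_acceptance(complement, ['a']))   # False
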
319 | def __replace_all(repls: dict, input: str) -> str:
320 | """ Replaces from the string **input** all the occurrence of the
321 | keys element of the dictionary **repls** with their relative
322 | value.
323 |
324 | :param dict repls: dictionary containing the mapping
325 | between the values to be changed and their
326 | appropriate substitution;
327 | :param str input: original string.
328 | :return: *(str)*, string with the appropriate values replaced.
329 | """
330 | return re.sub('|'.join(re.escape(key) for key in repls.keys()),
331 | lambda k: repls[k.group(0)], input)
332 |
333 |
334 | # SIDE EFFECTS
335 | def rename_afw_states(afw: dict, suffix: str):
336 | """ Side effect on input! Renames all the states of the AFW
337 | adding a **suffix**.
338 |
339 | It is a utility function used during testing to avoid automata
340 | having states with names in common.
341 |
342 | Avoid suffixes that can lead to reserved names like "as", "and", ...
343 |
344 | :param dict afw: input AFW.
345 | :param str suffix: string to be added at beginning of each state name.
346 | """
347 | conversion_dict = {}
348 | new_states = set()
349 | new_accepting = set()
350 | for state in afw['states']:
351 | conversion_dict[state] = '' + suffix + state
352 | new_states.add('' + suffix + state)
353 | if state in afw['accepting_states']:
354 | new_accepting.add('' + suffix + state)
355 |
356 | afw['states'] = new_states
357 | afw['initial_state'] = '' + suffix + afw['initial_state']
358 | afw['accepting_states'] = new_accepting
359 |
360 | new_transitions = {}
361 | for transition in afw['transitions']:
362 | new_transition = __replace_all(conversion_dict, transition[0])
363 | new_transitions[new_transition, transition[1]] = \
364 | __replace_all(conversion_dict, afw['transitions'][transition])
365 | afw['transitions'] = new_transitions
366 |
367 |
368 | def afw_union(afw_1: dict, afw_2: dict) -> dict:
369 | """ Returns a AFW that reads the union of the languages read
370 | by input AFWs.
371 |
372 | Let :math:`A_1 = (Σ, S_1 , s^0_1, ρ_1 , F_1 )` and :math:`A_2
373 | = (Σ, S_2 , s^0_2, ρ_2 , F_2 )`
374 | be alternating automata accepting the languages :math:`L(
375 | A_1)` and :math:`L(A_2)`.
376 | Then, :math:`B_∪ = (Σ, S_1 ∪ S_2 ∪ {root}, ρ_∪ , root ,
377 | F_1 ∪ F_2 )` with
378 | :math:`ρ_∪ = ρ_1 ∪ ρ_2 ∪ [(root, a): ρ(s^0_1 , a) ∨ ρ(s^0_2 ,
379 | a)]` accepts :math:`L(A_1) ∪ L(A_2)`.
380 |
381 | Pay attention to avoid AFWs with state names in common; in that case
382 | use the :mod:`PySimpleAutomata.AFW.rename_afw_states` function.
383 |
384 | :param dict afw_1: first input AFW;
385 | :param dict afw_2: second input AFW;.
386 | :return: *(dict)* representing the united AFW.
387 | """
388 | # make sure new root state is unique
389 | initial_state = 'root'
390 | i = 0
391 | while initial_state in afw_1['states'] or initial_state in afw_2['states']:
392 | initial_state = 'root' + str(i)
393 | i += 1
394 |
395 | union = {
396 | 'alphabet': afw_1['alphabet'].union(afw_2['alphabet']),
397 | 'states':
398 | afw_1['states'].union(afw_2['states']).union({initial_state}),
399 | 'initial_state': initial_state,
400 | 'accepting_states':
401 | afw_1['accepting_states'].union(afw_2['accepting_states']),
402 | 'transitions': deepcopy(afw_1['transitions'])
403 | }
404 |
405 | # add also afw_2 transitions
406 | union['transitions'].update(afw_2['transitions'])
407 |
408 | # if at least one initial state is accepting, then the new one is too
409 | if afw_1['initial_state'] in afw_1['accepting_states'] \
410 | or afw_2['initial_state'] in afw_2['accepting_states']:
411 | union['accepting_states'].add(union['initial_state'])
412 |
413 | # copy all transitions of the original initial states, and their
414 | # disjunction when both are defined, into the new initial state
415 | for action in union['alphabet']:
416 | if (afw_1['initial_state'], action) in afw_1['transitions']:
417 | union['transitions'][initial_state, action] = \
418 | '(' + \
419 | afw_1['transitions'][afw_1['initial_state'], action] + \
420 | ')'
421 | if (afw_2['initial_state'], action) in afw_2['transitions']:
422 | union['transitions'][initial_state, action] += \
423 | ' or (' + \
424 | afw_2['transitions'][afw_2['initial_state'], action] + \
425 | ')'
426 | elif (afw_2['initial_state'], action) in afw_2['transitions']:
427 | union['transitions'][initial_state, action] = \
428 | '(' + \
429 | afw_2['transitions'][afw_2['initial_state'], action] + \
430 | ')'
431 |
432 | return union
433 |
434 |
435 | def afw_intersection(afw_1: dict, afw_2: dict) -> dict:
436 | """ Returns a AFW that reads the intersection of the
437 | languages read by input AFWs.
438 |
439 | Let :math:`A_1 = (Σ, S_1 , s^0_1, ρ_1 , F_1 )` and :math:`A_2
440 | = (Σ, S_2 , s^0_2, ρ_2 , F_2 )`
441 | be alternating automata accepting the languages :math:`L(
442 | A_1)` and :math:`L(A_2)`.
443 | Then, :math:`B_∩ = (Σ, S_1 ∪ S_2 ∪ {root}, root, ρ_∩ , F_1 ∪
444 | F_2 )` with
445 | :math:`ρ_∩ = ρ_1 ∪ ρ_2 ∪ [(root, a): ρ(s^0_1 , a) ∧ ρ(s^0_2 ,
446 | a)]` accepts :math:`L(A_1) ∩ L(A_2)`.
447 |
448 | :param dict afw_1: first input AFW;
449 | :param dict afw_2: second input AFW.
450 | :return: *(dict)* representing a AFW.
451 | """
452 | # make sure new root state is unique
453 | initial_state = 'root'
454 | i = 0
455 | while initial_state in afw_1['states'] or initial_state in afw_2['states']:
456 | initial_state = 'root' + str(i)
457 | i += 1
458 |
459 | intersection = {
460 | 'alphabet': afw_1['alphabet'].union(afw_2['alphabet']),
461 | 'states':
462 | afw_1['states'].union(afw_2['states']).union({initial_state}),
463 | 'initial_state': initial_state,
464 | 'accepting_states':
465 | afw_1['accepting_states'].union(afw_2['accepting_states']),
466 | 'transitions': deepcopy(afw_1['transitions'])
467 | }
468 |
469 | # add also afw_2 transitions
470 | intersection['transitions'].update(afw_2['transitions'])
471 |
472 | # if both initial states are accepting, then the new one is too
473 | if afw_1['initial_state'] in afw_1['accepting_states'] \
474 | and afw_2['initial_state'] in afw_2['accepting_states']:
475 | intersection['accepting_states'].add(
476 | intersection['initial_state'])
477 |
478 | # The new initial state transitions will be the conjunction of
479 | # the previous initial states' ones
480 | for action in intersection['alphabet']:
481 | if (afw_1['initial_state'], action) in afw_1['transitions']:
482 | intersection['transitions'][initial_state, action] = \
483 | '(' + \
484 | afw_1['transitions'][afw_1['initial_state'], action] + \
485 | ')'
486 | if (afw_2['initial_state'], action) in afw_2['transitions']:
487 | intersection['transitions'][initial_state, action] += \
488 | ' and (' + \
489 | afw_2['transitions'][afw_2['initial_state'], action] + \
490 | ')'
491 | else:
492 | intersection['transitions'][
493 | initial_state, action] += ' and False'
494 | elif (afw_2['initial_state'], action) in afw_2['transitions']:
495 | intersection['transitions'][initial_state, action] = \
496 | 'False and (' + \
497 | afw_2['transitions'][afw_2['initial_state'], action] + \
498 | ')'
499 |
500 | return intersection
501 |
502 |
503 | def afw_nonemptiness_check(afw: dict) -> bool:
504 | """ Checks if the input AFW reads any language other than the
505 | empty one, returning True/False.
506 |
507 | The afw is translated into a nfa and then its nonemptiness is
508 | checked.
509 |
510 | :param dict afw: input AFW.
511 | :return: *(bool)*, True if input afw is nonempty, False otherwise.
512 | """
513 | nfa = afw_to_nfa_conversion(afw)
514 | return NFA.nfa_nonemptiness_check(nfa)
515 |
516 |
517 | def afw_nonuniversality_check(afw: dict) -> bool:
518 | """ Checks if the language read by the input AFW is different
519 | from Σ∗, returning True/False.
520 |
521 | The afw is translated into a nfa and then its nonuniversality
522 | is checked.
523 |
524 | :param dict afw: input AFW.
525 | :return: *(bool)*, True if input afw is nonuniversal, False
526 | otherwise.
527 | """
528 | nfa = afw_to_nfa_conversion(afw)
529 | return NFA.nfa_nonuniversality_check(nfa)
530 |
--------------------------------------------------------------------------------
/PySimpleAutomata/DFA.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to manage DFA (Deterministic Finite Automaton).
3 |
4 | Formally a DFA, Deterministic Finite Automaton, is a tuple
5 | :math:`A=(Σ,S,s_0,ρ,F)`, where:
6 |
7 | • Σ is a finite nonempty alphabet;
8 | • S is a finite nonempty set of states;
9 | • :math:`s_0 ∈ S` is the initial state;
10 | • F ⊆ S is the set of accepting states;
11 | • :math:`ρ: S × Σ → S` is a transition function, which can be a
12 | partial function.
13 |
14 | Intuitively, :math:`s_x = ρ(s, a)` is the state that A can
15 | move into when it is in state s and it reads the symbol a.
16 | (If :math:`ρ(s, a)` is undefined then reading a leads to
17 | rejection.)
18 |
19 | In this module a DFA is defined as follows
20 |
21 | DFA = dict() with the following keys-values:
22 | • alphabet => set() ;
23 | • states => set() ;
24 | • initial_state => str() ;
25 | • accepting_states => set() ;
26 | • transitions => dict(), where
27 | **key**: (*state* ∈ states, *action* ∈ alphabet)
28 |
29 | **value**: (*arriving_state* in states).
30 | """
31 |
32 | from copy import deepcopy
33 |
34 |
35 | def dfa_word_acceptance(dfa: dict, word: list) -> bool:
36 | """ Checks if a given **word** is accepted by a DFA,
37 | returning True/False.
38 |
39 | The word w is accepted by a DFA if DFA has an accepting run
40 | on w. Since A is deterministic,
41 | :math:`w ∈ L(A)` if and only if :math:`ρ(s_0 , w) ∈ F` .
42 |
43 | :param dict dfa: input DFA;
44 | :param list word: list of actions ∈ dfa['alphabet'].
45 | :return: *(bool)*, True if the word is accepted, False in the
46 | other case.
47 | """
48 | current_state = dfa['initial_state']
49 | for action in word:
50 | if (current_state, action) in dfa['transitions']:
51 | current_state = dfa['transitions'][current_state, action]
52 | else:
53 | return False
54 |
55 | if current_state in dfa['accepting_states']:
56 | return True
57 | else:
58 | return False
59 |
60 |
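# --- Illustrative sketch (editorial addition, not part of the library) ---
# A minimal usage example of dfa_word_acceptance, using the DFA dictionary
# layout documented in the module docstring. The toy DFA is made up.
def _example_dfa_word_acceptance():
    toy_dfa = {
        'alphabet': {'a', 'b'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'b'): 's0'},
    }
    return (dfa_word_acceptance(toy_dfa, ['a']),        # True: ends in s1 ∈ F
            dfa_word_acceptance(toy_dfa, ['a', 'b']),   # False: ends in s0
            dfa_word_acceptance(toy_dfa, ['b']))        # False: ρ(s0, b) undefined
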
61 | # Side effect on input dfa
62 | def dfa_completion(dfa: dict) -> dict:
63 | """ Side effects on input! Completes the DFA assigning to
64 | each state a transition for each letter in the alphabet (if
65 | not already defined).
66 |
67 | We say that a DFA is complete if its transition function
68 | :math:`ρ:S×Σ→S` is a total function, that is,
69 | for all :math:`s ∈ S` and all :math:`a ∈ Σ` we have that
70 | exists a :math:`ρ(s,a)=s_x` for some :math:`s_x ∈ S`.
71 | Given an arbitrary DFA A, its completed version :math:`A_T`
72 | is obtained as follows:
73 | :math:`A_T = (Σ, S ∪ \{sink\}, s_0 , ρ_T , F )` with
74 | :math:`ρ_T(s,a)=sink`
75 | when :math:`ρ(s,a)` is not defined in A and :math:`ρ_T=ρ` in
76 | the other cases.
77 |
78 | :param dict dfa: input DFA.
79 | :return: *(dict)* representing the completed DFA.
80 | """
81 | dfa['states'].add('sink')
82 | for state in dfa['states']:
83 | for action in dfa['alphabet']:
84 | if (state, action) not in dfa['transitions']:
85 | dfa['transitions'][state, action] = 'sink'
86 | return dfa
87 |
88 |
89 | def dfa_complementation(dfa: dict) -> dict:
90 | """ Returns a DFA that accepts any word but he ones accepted
91 | by the input DFA.
92 |
93 | Let A be a completed DFA, :math:`Ā = (Σ, S, s_0 , ρ, S − F )`
94 | is the DFA that runs A but accepts whatever word A does not.
95 |
96 | :param dict dfa: input DFA.
97 | :return: *(dict)* representing the complement of the input DFA.
98 | """
99 | dfa_complement = dfa_completion(deepcopy(dfa))
100 | dfa_complement['accepting_states'] = \
101 | dfa_complement['states'].difference(dfa_complement['accepting_states'])
102 | return dfa_complement
103 |
104 |
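# --- Illustrative sketch (editorial addition, not part of the library) ---
# Complementation first completes the DFA (adding a 'sink' state), then
# swaps accepting and non-accepting states, so acceptance is flipped.
def _example_dfa_complementation():
    toy_dfa = {
        'alphabet': {'a', 'b'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'b'): 's0'},
    }
    complement = dfa_complementation(toy_dfa)
    return (dfa_word_acceptance(complement, ['a']),   # False: accepted before
            dfa_word_acceptance(complement, ['b']))   # True: rejected before
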
105 | def dfa_intersection(dfa_1: dict, dfa_2: dict) -> dict:
106 | """ Returns a DFA accepting the intersection of the DFAs in
107 | input.
108 |
109 | Let :math:`A_1 = (Σ, S_1 , s_{01} , ρ_1 , F_1 )` and
110 | :math:`A_2 = (Σ, S_2 , s_{02} , ρ_2 , F_2 )` be two DFAs.
111 | Then there is a DFA :math:`A_∧` that runs simultaneously both
112 | :math:`A_1` and :math:`A_2` on the input word and
113 | accepts when both accept.
114 | It is defined as:
115 |
116 | :math:`A_∧ = (Σ, S_1 × S_2 , (s_{01} , s_{02} ), ρ, F_1 × F_2 )`
117 |
118 | where
119 |
120 | :math:`ρ((s_1 , s_2 ), a) = (s_{X1} , s_{X2} )` iff
121 | :math:`s_{X1} = ρ_1 (s_1 , a)` and :math:`s_{X2}= ρ_2 (s_2 , a)`
122 |
123 | The proposed implementation guarantees that the resulting DFA has only
124 | **reachable** states.
125 |
126 | :param dict dfa_1: first input DFA;
127 | :param dict dfa_2: second input DFA.
128 | :return: *(dict)* representing the intersected DFA.
129 | """
130 | intersection = {
131 | 'alphabet': dfa_1['alphabet'].intersection(dfa_2['alphabet']),
132 | 'states': {(dfa_1['initial_state'], dfa_2['initial_state'])},
133 | 'initial_state': (dfa_1['initial_state'], dfa_2['initial_state']),
134 | 'accepting_states': set(),
135 | 'transitions': dict()
136 | }
137 |
138 | boundary = set()
139 | boundary.add(intersection['initial_state'])
140 | while boundary:
141 | (state_dfa_1, state_dfa_2) = boundary.pop()
142 | if state_dfa_1 in dfa_1['accepting_states'] \
143 | and state_dfa_2 in dfa_2['accepting_states']:
144 | intersection['accepting_states'].add((state_dfa_1, state_dfa_2))
145 |
146 | for a in intersection['alphabet']:
147 | if (state_dfa_1, a) in dfa_1['transitions'] \
148 | and (state_dfa_2, a) in dfa_2['transitions']:
149 | next_state_1 = dfa_1['transitions'][state_dfa_1, a]
150 | next_state_2 = dfa_2['transitions'][state_dfa_2, a]
151 | if (next_state_1, next_state_2) not in intersection['states']:
152 | intersection['states'].add((next_state_1, next_state_2))
153 | boundary.add((next_state_1, next_state_2))
154 | intersection['transitions'][(state_dfa_1, state_dfa_2), a] = \
155 | (next_state_1, next_state_2)
156 |
157 | return intersection
158 |
159 |
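# --- Illustrative sketch (editorial addition, not part of the library) ---
# Intersecting two made-up DFAs over {'a'}: one accepting nonempty words,
# one accepting even-length words. The product accepts exactly the
# nonempty even-length words, and only reachable state pairs are built.
def _example_dfa_intersection():
    nonempty_words = {
        'alphabet': {'a'},
        'states': {'s0', 's1'},
        'initial_state': 's0',
        'accepting_states': {'s1'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'a'): 's1'},
    }
    even_length_words = {
        'alphabet': {'a'},
        'states': {'t0', 't1'},
        'initial_state': 't0',
        'accepting_states': {'t0'},
        'transitions': {('t0', 'a'): 't1', ('t1', 'a'): 't0'},
    }
    product = dfa_intersection(nonempty_words, even_length_words)
    return (dfa_word_acceptance(product, ['a', 'a']),   # True
            dfa_word_acceptance(product, ['a']))        # False
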
160 | def dfa_union(dfa_1: dict, dfa_2: dict) -> dict:
161 | """ Returns a DFA accepting the union of the input DFAs.
162 |
163 | Let :math:`A_1 = (Σ, S_1 , s_{01} , ρ_1 , F_1 )` and
164 | :math:`A_2 = (Σ, S_2 , s_{02} , ρ_2 , F_2 )` be two completed
165 | DFAs.
166 | Then there is a DFA :math:`A_∨` that runs simultaneously both
167 | :math:`A_1` and :math:`A_2` on the input word
168 | and accepts when one of them accepts.
169 | It is defined as:
170 |
171 | :math:`A_∨ = (Σ, S_1 × S_2 , (s_{01} , s_{02} ), ρ, (F_1 ×
172 | S_2 ) ∪ (S_1 × F_2 ))`
173 |
174 | where
175 |
176 | :math:`ρ((s_1 , s_2 ), a) = (s_{X1} , s_{X2} )` iff
177 | :math:`s_{X1} = ρ_1 (s_1 , a)` and :math:`s_{X2} = ρ_2 (s_2 , a)`
178 |
179 | The proposed implementation guarantees the resulting DFA has only **reachable**
180 | states.
181 |
182 | :param dict dfa_1: first input DFA;
183 | :param dict dfa_2: second input DFA.
184 | :return: *(dict)* representing the united DFA.
185 | """
186 | dfa_1 = deepcopy(dfa_1)
187 | dfa_2 = deepcopy(dfa_2)
188 | dfa_1['alphabet'] = dfa_2['alphabet'] = dfa_1['alphabet'].union(
189 | dfa_2['alphabet']) # to complete the DFAs over all possible transition
190 | dfa_1 = dfa_completion(dfa_1)
191 | dfa_2 = dfa_completion(dfa_2)
192 |
193 | union = {
194 | 'alphabet': dfa_1['alphabet'].copy(),
195 | 'states': {(dfa_1['initial_state'], dfa_2['initial_state'])},
196 | 'initial_state': (dfa_1['initial_state'], dfa_2['initial_state']),
197 | 'accepting_states': set(),
198 | 'transitions': dict()
199 | }
200 |
201 | boundary = set()
202 | boundary.add(union['initial_state'])
203 | while boundary:
204 | (state_dfa_1, state_dfa_2) = boundary.pop()
205 | if state_dfa_1 in dfa_1['accepting_states'] \
206 | or state_dfa_2 in dfa_2['accepting_states']:
207 | union['accepting_states'].add((state_dfa_1, state_dfa_2))
208 | for a in union['alphabet']:
209 | # as DFAs are completed they surely have the transition
210 | next_state_1 = dfa_1['transitions'][state_dfa_1, a]
211 | next_state_2 = dfa_2['transitions'][state_dfa_2, a]
212 | if (next_state_1, next_state_2) not in union['states']:
213 | union['states'].add((next_state_1, next_state_2))
214 | boundary.add((next_state_1, next_state_2))
215 | union['transitions'][(state_dfa_1, state_dfa_2), a] = \
216 | (next_state_1, next_state_2)
217 |
218 | return union
219 |
220 |
221 | def dfa_minimization(dfa: dict) -> dict:
222 | """ Returns the minimization of the DFA in input through a
223 | greatest fix-point method.
224 |
225 | Given a completed DFA :math:`A = (Σ, S, s_0 , ρ, F )` there
226 | exists a single minimal DFA :math:`A_m`
227 | which is equivalent to A, i.e. reads the same language
228 | :math:`L(A) = L(A_m)` and with a minimal number of states.
229 | To construct such a DFA we exploit bisimulation as a suitable
230 | equivalence relation between states.
231 |
232 | A bisimulation relation :math:`E ∈ S × S` is a relation
233 | between states that satisfies the following condition:
234 | if :math:`(s, t) ∈ E` then:
235 |
236 | • s ∈ F iff t ∈ F;
237 | • For all :math:`(s_X,a)` such that :math:`ρ(s, a) = s_X`,
238 | there exists :math:`t_X` such that :math:`ρ(t, a) = t_X`
239 | and :math:`(s_X , t_X ) ∈ E`;
240 | • For all :math:`(t_X,a)` such that :math:`ρ(t, a) = t_X` ,
241 | there exists :math:`s_X` such that :math:`ρ(s, a) = s_X`
242 | and :math:`(s_X , t_X ) ∈ E`.
243 |
244 | :param dict dfa: input DFA.
245 | :return: *(dict)* representing the minimized DFA.
246 | """
247 | dfa = dfa_completion(deepcopy(dfa))
248 |
249 | ################################################################
250 | ### Greatest-fixpoint
251 |
252 | z_current = set()
253 | z_next = set()
254 |
255 | # First bisimulation condition check (can be done just once)
256 | # s ∈ F iff t ∈ F
257 | for state_s in dfa['states']:
258 | for state_t in dfa['states']:
259 | if (
260 | state_s in dfa['accepting_states']
261 | and state_t in dfa['accepting_states']
262 | ) or (
263 | state_s not in dfa['accepting_states']
264 | and state_t not in dfa['accepting_states']
265 | ):
266 | z_next.add((state_s, state_t))
267 |
268 | # Second and third conditions of the bisimilarity check
269 | while z_current != z_next:
270 | z_current = z_next
271 | z_next = z_current.copy()
272 | for (state_1, state_2) in z_current:
273 | # for all (s_X, a) s.t. ρ(s, a) = s_X, there exists t_X
274 | # s.t. ρ(t, a) = t_X and (s_X, t_X) ∈ Z_i ;
275 | for a in dfa['alphabet']:
276 | if (state_1, a) in dfa['transitions'] \
277 | and (state_2, a) in dfa['transitions']:
278 | if (
279 | dfa['transitions'][state_1, a],
280 | dfa['transitions'][state_2, a]
281 | ) not in z_current:
282 | z_next.remove((state_1, state_2))
283 | break
284 | else:
285 | # action a not possible in state element[0]
286 | # or element[1]
287 | z_next.remove((state_1, state_2))
288 | break
289 |
290 | ################################################################
291 | ### Equivalence Sets
292 |
293 | equivalence = dict()
294 | for (state_1, state_2) in z_current:
295 | equivalence.setdefault(state_1, set()).add(state_2)
296 |
297 | ################################################################
298 | ### Minimal DFA construction
299 |
300 | dfa_min = {
301 | 'alphabet': dfa['alphabet'].copy(),
302 | 'states': set(),
303 | 'initial_state': dfa['initial_state'],
304 | 'accepting_states': set(),
305 | 'transitions': dfa['transitions'].copy()
306 | }
307 |
308 | # select one element for each equivalence set
309 | for equivalence_set in equivalence.values():
310 | if dfa_min['states'].isdisjoint(equivalence_set):
311 | e = equivalence_set.pop()
312 | dfa_min['states'].add(e) # TODO highlight this instruction
313 | equivalence_set.add(e)
314 |
315 | dfa_min['accepting_states'] = \
316 | dfa_min['states'].intersection(dfa['accepting_states'])
317 |
318 | for t in dfa['transitions']:
319 | if t[0] not in dfa_min['states']:
320 | dfa_min['transitions'].pop(t)
321 | elif dfa['transitions'][t] not in dfa_min['states']:
322 | dfa_min['transitions'][t] = \
323 | equivalence[dfa['transitions'][t]]. \
324 | intersection(dfa_min['states']).pop()
325 |
326 | return dfa_min
327 |
328 |
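# --- Illustrative sketch (editorial addition, not part of the library) ---
# Minimizing a made-up DFA in which 's1' and 's2' are bisimilar: they are
# collapsed into one state, and 's3' is merged with the 'sink' state added
# by the internal completion step, leaving three states overall.
def _example_dfa_minimization():
    redundant_dfa = {
        'alphabet': {'a'},
        'states': {'s0', 's1', 's2', 's3'},
        'initial_state': 's0',
        'accepting_states': {'s1', 's2'},
        'transitions': {
            ('s0', 'a'): 's1',
            ('s1', 'a'): 's3',
            ('s2', 'a'): 's3',   # 's2' behaves exactly like 's1'
            ('s3', 'a'): 's3',
        },
    }
    minimal = dfa_minimization(redundant_dfa)
    return len(minimal['states'])  # -> 3
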
329 | # Side effects on input variable
330 | def dfa_reachable(dfa: dict) -> dict:
331 | """ Side effects on input! Removes unreachable states from a
332 | DFA and returns the pruned DFA.
333 |
334 | It is possible to remove from a DFA A all unreachable states
335 | from the initial state without altering the language.
336 | The reachable DFA :math:`A_R` corresponding to A is defined as:
337 |
338 | :math:`A_R = (Σ, S_R , s_0 , ρ|S_R , F ∩ S_R )`
339 |
340 | where
341 |
342 | • :math:`S_R` set of reachable state from the initial one
343 | • :math:`ρ|S_R` is the restriction on :math:`S_R × Σ` of ρ.
344 |
345 | :param dict dfa: input DFA.
346 | :return: *(dict)* representing the pruned DFA.
347 | """
348 | reachable_states = set() # set of reachable states from root
349 | boundary = set()
350 | reachable_states.add(dfa['initial_state'])
351 | boundary.add(dfa['initial_state'])
352 |
353 | while boundary:
354 | s = boundary.pop()
355 | for a in dfa['alphabet']:
356 | if (s, a) in dfa['transitions']:
357 | if dfa['transitions'][s, a] not in reachable_states:
358 | reachable_states.add(dfa['transitions'][s, a])
359 | boundary.add(dfa['transitions'][s, a])
360 | dfa['states'] = reachable_states
361 | dfa['accepting_states'] = \
362 | dfa['accepting_states'].intersection(dfa['states'])
363 |
364 | transitions = dfa[
365 | 'transitions'].copy() # iterate over a copy: the dict cannot be
366 | # modified while iterating over it
367 | for t in transitions:
368 | if t[0] not in dfa['states']:
369 | dfa['transitions'].pop(t)
370 | elif dfa['transitions'][t] not in dfa['states']:
371 | dfa['transitions'].pop(t)
372 |
373 | return dfa
374 |
375 |
376 | # Side effects on input variable
377 | def dfa_co_reachable(dfa: dict) -> dict:
378 | """ Side effects on input! Removes from the DFA all states that
379 | do not reach a final state and returns the pruned DFA.
380 |
381 | It is possible to remove from a DFA A all states that do not
382 | reach a final state without altering the language.
383 | The co-reachable dfa :math:`A_F` corresponding to A is
384 | defined as:
385 |
386 | :math:`A_F = (Σ, S_F , s_0 , ρ|S_F , F )`
387 |
388 | where
389 |
390 | • :math:`S_F` is the set of states that reach a final state
391 | • :math:`ρ|S_F` is the restriction on :math:`S_F × Σ` of ρ.
392 |
393 | :param dict dfa: input DFA.
394 | :return: *(dict)* representing the pruned DFA.
395 | """
396 |
397 | co_reachable_states = dfa['accepting_states'].copy()
398 | boundary = co_reachable_states.copy()
399 |
400 | # inverse transition function
401 | inverse_transitions = dict()
402 | for key, value in dfa['transitions'].items():
403 | inverse_transitions.setdefault(value, set()).add(key)
404 |
405 | while boundary:
406 | s = boundary.pop()
407 | if s in inverse_transitions:
408 | for (state, action) in inverse_transitions[s]:
409 | if state not in co_reachable_states:
410 | boundary.add(state)
411 | co_reachable_states.add(state)
412 |
413 | dfa['states'] = co_reachable_states
414 |
415 | # If not s_0 ∈ S_F the resulting dfa is empty
416 | if dfa['initial_state'] not in dfa['states']:
417 | dfa = {
418 | 'alphabet': set(),
419 | 'states': set(),
420 | 'initial_state': None,
421 | 'accepting_states': set(),
422 | 'transitions': dict()
423 | }
424 | return dfa
425 |
426 | transitions = dfa['transitions'].copy()
427 | for t in transitions:
428 | if t[0] not in dfa['states']:
429 | dfa['transitions'].pop(t)
430 | elif dfa['transitions'][t] not in dfa['states']:
431 | dfa['transitions'].pop(t)
432 |
433 | return dfa
434 |
435 |
436 | # Side effects on input variable
437 | def dfa_trimming(dfa: dict) -> dict:
438 | """ Side effects on input! Returns the DFA in input trimmed,
439 | so both reachable and co-reachable.
440 |
441 | Given a DFA A, the corresponding trimmed DFA contains only
442 | those states that are reachable from the initial state
443 | and that lead to a final state.
444 | The trimmed dfa :math:`A_{RF}` corresponding to A is defined as
445 |
446 | :math:`A_{RF} = (Σ, S_R ∩ S_F , s_0 , ρ|S_R∩S_F , F ∩ S_R )`
447 |
448 | where
449 |
450 | • :math:`S_R` set of reachable states from the initial state
451 | • :math:`S_F` set of states that reaches a final state
452 | • :math:`ρ|S_R∩S_F` is the restriction on :math:`(S_R ∩ S_F )
453 | × Σ` of ρ.
454 |
455 | :param dict dfa: input DFA.
456 | :return: *(dict)* representing the trimmed input DFA.
457 | """
458 | # Reachable DFA
459 | dfa = dfa_reachable(dfa)
460 | # Co-reachable DFA
461 | dfa = dfa_co_reachable(dfa)
462 | # trimmed DFA
463 | return dfa
464 |
465 |
466 | def dfa_projection(dfa: dict, symbols_to_remove: set) -> dict:
467 | """ Returns a NFA that reads the language recognized by the
468 | input DFA where all the symbols in **symbols_to_project**
469 | are projected out of the alphabet.
470 |
471 | Projection in a DFA is the operation that existentially
472 | removes from a word all occurrence of symbols in a set X.
473 | Given a dfa :math:`A = (Σ, S, s_0 , ρ, F )`, we can define an
474 | NFA :math:`A_{πX}`
475 | that recognizes the language :math:`πX(L(A))` as
476 |
477 | :math:`A_{πX}= ( Σ−X, S, S_0 , ρ_X , F )`
478 |
479 | where
480 |
481 | • :math:`S_0 = \{s | (s_0 , s) ∈ ε_X \}`
482 | • :math:`(s,a,s_y ) ∈ ρ_X` iff there exist :math:`(t, t_y)` s.t.
483 | :math:`(s,t) ∈ ε_X , t_y = ρ(t,a)` and :math:`(t_y , s_y )
484 | ∈ ε_X`
485 |
486 | :param dict dfa: input DFA;
487 | :param set symbols_to_remove: set containing symbols ∈ dfa[
488 | 'alphabet'] to be projected out from DFA.
489 | :return: *(dict)* representing a NFA.
490 | """
491 | nfa = {
492 | 'alphabet': dfa['alphabet'].difference(symbols_to_remove),
493 | 'states': dfa['states'].copy(),
494 | 'initial_states': {dfa['initial_state']},
495 | 'accepting_states': dfa['accepting_states'].copy(),
496 | 'transitions': dict()
497 | }
498 |
499 | current_nfa_transitions = None
500 | current_e_x = None
501 | e_x = dict() # equivalence relation dictionary
502 |
503 | # loop until no more changes are possible
504 | while current_nfa_transitions != nfa['transitions'] or current_e_x != e_x:
505 | current_nfa_transitions = nfa['transitions'].copy()
506 | current_e_x = deepcopy(e_x)
507 | for (state, a) in dfa['transitions']:
508 | next_state = dfa['transitions'][state, a]
509 | if a in symbols_to_remove:
510 | # mark next_state as equivalent to state
511 | e_x.setdefault(state, set()).add(next_state)
512 |
513 | app_set = set()
514 | for equivalent in e_x[state]:
515 | # mark the states equivalent to next_state as equivalent to state too
516 | if equivalent in e_x:
517 | app_set.update(e_x[equivalent])
518 | # add all transitions of equivalent states to state
519 | for act in nfa['alphabet']:
520 | if (equivalent, act) in dfa['transitions']:
521 | equivalent_next = dfa['transitions'][
522 | equivalent, act]
523 | nfa['transitions'].setdefault(
524 | (state, act), set()).add(equivalent_next)
525 | # if equivalent_next has equivalent states
526 | if equivalent_next in e_x:
527 | # the transition leads also to these states
528 | nfa['transitions'][state, act].update(
529 | e_x[equivalent_next])
530 | e_x[state].update(app_set)
531 | else:
532 | # add the transition to the NFA
533 | nfa['transitions'].setdefault((state, a), set()).add(
534 | next_state)
535 | # if next_state has equivalent states
536 | if next_state in e_x:
537 | # the same transition also arrives at all these other states
538 | nfa['transitions'][state, a].update(e_x[next_state])
539 |
540 | # Add all state equivalent to the initial one to NFA initial states set
541 | if dfa['initial_state'] in e_x:
542 | nfa['initial_states'].update(e_x[dfa['initial_state']])
543 |
544 | return nfa
545 |
546 |
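# --- Illustrative sketch (editorial addition, not part of the library) ---
# Projecting the symbol 'b' out of a made-up DFA accepting only the word
# 'ab': the resulting NFA accepts the word 'a', with 'b' removed
# existentially. NFA is imported locally to avoid a circular import.
def _example_dfa_projection():
    toy_dfa = {
        'alphabet': {'a', 'b'},
        'states': {'s0', 's1', 's2'},
        'initial_state': 's0',
        'accepting_states': {'s2'},
        'transitions': {('s0', 'a'): 's1', ('s1', 'b'): 's2'},
    }
    projected = dfa_projection(toy_dfa, {'b'})
    from PySimpleAutomata import NFA
    return NFA.nfa_word_acceptance(projected, ['a'])  # -> True
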
547 | def dfa_nonemptiness_check(dfa: dict) -> bool:
548 | """ Checks if the input DFA is nonempty (i.e. if it recognizes a
549 | language except the empty one), returning True/False.
550 |
551 | An automaton A is nonempty if :math:`L(A) ≠ ∅`. L(A) is
552 | nonempty iff there are states :math:`s_0` and :math:`t ∈ F` such
553 | that t is connected to :math:`s_0`. Thus, automata
554 | nonemptiness is equivalent to graph reachability, where a
555 | breadth-first-search algorithm can construct in linear time
556 | the set of all states connected to initial state
557 | :math:`s_0`.
558 | A is nonempty iff this set intersects F nontrivially.
559 |
560 | :param dict dfa: input DFA.
561 | :return: *(bool)*, True if the DFA is nonempty, False otherwise
562 | """
563 | # BFS
564 | queue = [dfa['initial_state']]
565 | visited = set()
566 | visited.add(dfa['initial_state'])
567 | while queue:
568 | state = queue.pop(0) # note: pop(0) on a list gives FIFO order,
569 | # unlike the arbitrary pop() on sets
570 | visited.add(state)
571 | for a in dfa['alphabet']:
572 | if (state, a) in dfa['transitions']:
573 | if dfa['transitions'][state, a] in dfa['accepting_states']:
574 | return True
575 | if dfa['transitions'][state, a] not in visited:
576 | queue.append(dfa['transitions'][state, a])
577 | return False
578 |
579 |
580 | # SIDE EFFECTS
581 | def rename_dfa_states(dfa: dict, suffix: str):
582 | """ Side effect on input! Renames all the states of the DFA
583 | adding a **suffix**.
584 |
585 | It is a utility function to be used to avoid automata
586 | having states with names in common.
587 |
588 | Avoid suffixes that can lead to reserved names like "as", "and", ...
589 |
590 | :param dict dfa: input DFA.
591 | :param str suffix: string to be added at beginning of each state name.
592 | """
593 | conversion_dict = dict()
594 | new_states = set()
595 | new_accepting = set()
596 | for state in dfa['states']:
597 | conversion_dict[state] = '' + suffix + state
598 | new_states.add('' + suffix + state)
599 | if state in dfa['accepting_states']:
600 | new_accepting.add('' + suffix + state)
601 |
602 | dfa['states'] = new_states
603 | dfa['initial_state'] = '' + suffix + dfa['initial_state']
604 | dfa['accepting_states'] = new_accepting
605 |
606 | new_transitions = dict()
607 | for transition in dfa['transitions']:
608 | new_transitions[conversion_dict[transition[0]], transition[1]] = \
609 | conversion_dict[dfa['transitions'][transition]]
610 | dfa['transitions'] = new_transitions
611 | return dfa
612 |
--------------------------------------------------------------------------------
/PySimpleAutomata/NFA.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to manage NFA (Nondeterministic Finite Automata).
3 |
4 | Formally an NFA, Nondeterministic Finite Automaton, is a tuple
5 | :math:`(Σ, S, S^0 , ρ, F )`, where
6 |
7 | • Σ is a finite nonempty alphabet;
8 | • S is a finite nonempty set of states;
9 | • :math:`S^0` is the nonempty set of initial states;
10 | • F is the set of accepting states;
11 | • :math:`ρ: S × Σ × S` is a transition relation. Intuitively,
12 | :math:`(s, a, s' ) ∈ ρ` states that A can move from s
13 | into s' when it reads the symbol a. It is allowed that
14 | :math:`(s, a, s' ) ∈ ρ` and :math:`(s, a, s'' ) ∈ ρ`
15 | with :math:`s' ≠ s''`.
16 |
17 | In this module a NFA is defined as follows
18 |
19 | NFA = dict() with the following keys-values:
20 |
21 | • alphabet => set() ;
22 | • states => set() ;
23 | • initial_states => set() ;
24 | • accepting_states => set() ;
25 | • transitions => dict(), where
26 | **key**: (state in states, action in alphabet)
27 |
28 | **value**: {set of arriving states in states}.
29 |
30 | """
31 |
32 | from PySimpleAutomata import DFA
33 |
34 |
35 | def nfa_intersection(nfa_1: dict, nfa_2: dict) -> dict:
36 | """ Returns a NFA that reads the intersection of the NFAs in
37 | input.
38 |
39 | Let :math:`A_1 = (Σ,S_1,S_1^0,ρ_1,F_1)` and :math:`A_2 =(Σ,
40 | S_2,S_2^0,ρ_2,F_2)` be two NFAs.
41 | There is a NFA :math:`A_∧` that runs simultaneously both
42 | :math:`A_1` and :math:`A_2` on the input word,
43 | so :math:`L(A_∧) = L(A_1)∩L(A_2)`.
44 | It is defined as:
45 |
46 | :math:`A_∧ = ( Σ , S , S_0 , ρ , F )`
47 |
48 | where
49 |
50 | • :math:`S = S_1 × S_2`
51 | • :math:`S_0 = S_1^0 × S_2^0`
52 | • :math:`F = F_1 × F_2`
53 | • :math:`((s,t), a, (s_X , t_X)) ∈ ρ` iff :math:`(s, a,s_X )
54 | ∈ ρ_1` and :math:`(t, a, t_X ) ∈ ρ_2`
55 |
56 | :param dict nfa_1: first input NFA;
57 | :param dict nfa_2: second input NFA;
58 | :return: *(dict)* representing the intersected NFA.
59 | """
60 | intersection = {
61 | 'alphabet': nfa_1['alphabet'].intersection(nfa_2['alphabet']),
62 | 'states': set(),
63 | 'initial_states': set(),
64 | 'accepting_states': set(),
65 | 'transitions': dict()
66 | }
67 | for init_1 in nfa_1['initial_states']:
68 | for init_2 in nfa_2['initial_states']:
69 | intersection['initial_states'].add((init_1, init_2))
70 |
71 | intersection['states'].update(intersection['initial_states'])
72 |
73 | boundary = set()
74 | boundary.update(intersection['initial_states'])
75 | while boundary:
76 | (state_nfa_1, state_nfa_2) = boundary.pop()
77 | if state_nfa_1 in nfa_1['accepting_states'] \
78 | and state_nfa_2 in nfa_2['accepting_states']:
79 | intersection['accepting_states'].add((state_nfa_1, state_nfa_2))
80 | for a in intersection['alphabet']:
81 | if (state_nfa_1, a) not in nfa_1['transitions'] \
82 | or (state_nfa_2, a) not in nfa_2['transitions']:
83 | continue
84 | s1 = nfa_1['transitions'][state_nfa_1, a]
85 | s2 = nfa_2['transitions'][state_nfa_2, a]
86 |
87 | for destination_1 in s1:
88 | for destination_2 in s2:
89 | next_state = (destination_1, destination_2)
90 | if next_state not in intersection['states']:
91 | intersection['states'].add(next_state)
92 | boundary.add(next_state)
93 | intersection['transitions'].setdefault(
94 | ((state_nfa_1, state_nfa_2), a), set()).add(next_state)
95 | if destination_1 in nfa_1['accepting_states'] \
96 | and destination_2 in nfa_2['accepting_states']:
97 | intersection['accepting_states'].add(next_state)
98 |
99 | return intersection
100 |
101 |
102 | def nfa_union(nfa_1: dict, nfa_2: dict) -> dict:
103 | """ Returns a NFA that reads the union of the NFAs in input.
104 |
105 | Let :math:`A_1 = (Σ,S_1,S_1^0,ρ_1,F_1)` and :math:`A_2 =(Σ,
106 | S_2,S_2^0,ρ_2,F_2)` be two NFAs. There is an NFA
107 | :math:`A_∨` that nondeterministically chooses :math:`A_1` or
108 | :math:`A_2` and runs it on the input word.
109 | It is defined as:
110 |
111 | :math:`A_∨ = (Σ, S, S_0 , ρ, F )`
112 |
113 | where:
114 |
115 | • :math:`S = S_1 ∪ S_2`
116 | • :math:`S_0 = S_1^0 ∪ S_2^0`
117 | • :math:`F = F_1 ∪ F_2`
118 | • :math:`ρ = ρ_1 ∪ ρ_2` , that is :math:`(s, a, s' ) ∈ ρ` if
119 | :math:`[ s ∈ S_1\ and\ (s, a, s' ) ∈ ρ_1 ]` OR :math:`[ s ∈
120 | S_2\ and\ (s, a, s' ) ∈ ρ_2 ]`
121 |
122 | Pay attention to avoid NFAs with state names in common; in that case
123 | use the :mod:`PySimpleAutomata.NFA.rename_nfa_states` function.
124 |
125 | :param dict nfa_1: first input NFA;
126 | :param dict nfa_2: second input NFA.
127 | :return: *(dict)* representing the united NFA.
128 | """
129 | union = {
130 | 'alphabet': nfa_1['alphabet'].union(nfa_2['alphabet']),
131 | 'states': nfa_1['states'].union(nfa_2['states']),
132 | 'initial_states':
133 | nfa_1['initial_states'].union(nfa_2['initial_states']),
134 | 'accepting_states':
135 | nfa_1['accepting_states'].union(nfa_2['accepting_states']),
136 | 'transitions': nfa_1['transitions'].copy()}
137 |
138 | for trans in nfa_2['transitions']:
139 | for elem in nfa_2['transitions'][trans]:
140 | union['transitions'].setdefault(trans, set()).add(elem)
141 |
142 | return union
143 |
144 |
145 | # NFA to DFA
146 | def nfa_determinization(nfa: dict) -> dict:
147 | """ Returns a DFA that reads the same language of the input NFA.
148 |
149 | Let A be an NFA, then there exists a DFA :math:`A_d` such
150 | that :math:`L(A_d) = L(A)`. Intuitively, :math:`A_d`
151 | collapses all possible runs of A on a given input word into
152 | one run over a larger state set.
153 | :math:`A_d` is defined as:
154 |
155 | :math:`A_d = (Σ, 2^S , s_0 , ρ_d , F_d )`
156 |
157 | where:
158 |
159 | • :math:`2^S` , i.e., the state set of :math:`A_d` , consists
160 | of all sets of states S in A;
161 | • :math:`s_0 = S^0` , i.e., the single initial state of
162 | :math:`A_d` is the set :math:`S_0` of initial states of A;
163 | • :math:`F_d = \{Q | Q ∩ F ≠ ∅\}`, i.e., the collection of
164 | sets of states that intersect F nontrivially;
165 | • :math:`ρ_d(Q, a) = \{s' | (s,a, s' ) ∈ ρ\ for\ some\ s ∈ Q\}`.
166 |
167 | :param dict nfa: input NFA.
168 | :return: *(dict)* representing a DFA
169 | """
170 | def state_name(s):
171 | return str(set(sorted(s)))
172 |
173 | dfa = {
174 | 'alphabet': nfa['alphabet'].copy(),
175 | 'initial_state': None,
176 | 'states': set(),
177 | 'accepting_states': set(),
178 | 'transitions': dict()
179 | }
180 |
181 | if len(nfa['initial_states']) > 0:
182 | dfa['initial_state'] = state_name(nfa['initial_states'])
183 | dfa['states'].add(state_name(nfa['initial_states']))
184 |
185 | sets_states = list()
186 | sets_queue = list()
187 | sets_queue.append(nfa['initial_states'])
188 | sets_states.append(nfa['initial_states'])
189 | if len(sets_states[0].intersection(nfa['accepting_states'])) > 0:
190 | dfa['accepting_states'].add(state_name(sets_states[0]))
191 |
192 | while sets_queue:
193 | current_set = sets_queue.pop(0)
194 | for a in dfa['alphabet']:
195 | next_set = set()
196 | for state in current_set:
197 | if (state, a) in nfa['transitions']:
198 | for next_state in nfa['transitions'][state, a]:
199 | next_set.add(next_state)
200 | if len(next_set) == 0:
201 | continue
202 | if next_set not in sets_states:
203 | sets_states.append(next_set)
204 | sets_queue.append(next_set)
205 | dfa['states'].add(state_name(next_set))
206 | if next_set.intersection(nfa['accepting_states']):
207 | dfa['accepting_states'].add(state_name(next_set))
208 |
209 | dfa['transitions'][state_name(current_set), a] = state_name(next_set)
210 |
211 | return dfa
212 |
213 |
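# --- Illustrative sketch (editorial addition, not part of the library) ---
# Determinizing a made-up NFA via the subset construction: DFA states are
# named after sets of NFA states, e.g. "{'q0'}" and "{'q0', 'q1'}", and
# the recognized language is unchanged.
def _example_nfa_determinization():
    toy_nfa = {
        'alphabet': {'a'},
        'states': {'q0', 'q1'},
        'initial_states': {'q0'},
        'accepting_states': {'q1'},
        'transitions': {('q0', 'a'): {'q0', 'q1'}},
    }
    determinized = nfa_determinization(toy_nfa)
    return DFA.dfa_word_acceptance(determinized, ['a'])  # -> True
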
214 | def nfa_complementation(nfa: dict) -> dict:
215 | """ Returns a DFA reading the complemented language read by
216 | input NFA.
217 |
218 | Complementing a nondeterministic automaton is possible by
219 | complementing its determinization.
220 | The construction is effective, but it involves an exponential
221 | blow-up, since determinization involves an unavoidable
222 | exponential blow-up (i.e., if NFA has n states,
223 | then the DFA has :math:`2^n` states).
224 |
225 | :param dict nfa: input NFA.
226 | :return: *(dict)* representing a completed DFA.
227 | """
228 | determinized_nfa = nfa_determinization(nfa)
229 | return DFA.dfa_complementation(determinized_nfa)
230 |
231 |
232 | def nfa_nonemptiness_check(nfa: dict) -> bool:
233 | """ Checks if the input NFA reads any language other than the
234 | empty one, returning True/False.
235 |
236 |     The language L(A) recognized by the automaton A is nonempty iff
237 |     there are states :math:`s ∈ S_0` and :math:`t ∈ F` such that
238 |     t is reachable from s.
239 |     Thus, automata nonemptiness is equivalent to graph reachability.
240 |
241 |     A breadth-first-search algorithm can construct in linear time
242 |     the set of all states reachable from a state in :math:`S_0`. A
243 |     is nonempty iff this set intersects F nontrivially.
244 |
245 | :param dict nfa: input NFA.
246 | :return: *(bool)*, True if the input nfa is nonempty, False
247 | otherwise.
248 | """
249 | # BFS
250 | queue = list()
251 | visited = set()
252 | for state in nfa['initial_states']:
253 | visited.add(state)
254 | queue.append(state)
255 | while queue:
256 | state = queue.pop(0)
257 | visited.add(state)
258 | for a in nfa['alphabet']:
259 | if (state, a) in nfa['transitions']:
260 | for next_state in nfa['transitions'][state, a]:
261 | if next_state in nfa['accepting_states']:
262 | return True
263 | if next_state not in visited:
264 | queue.append(next_state)
265 | return False
266 |
267 |
268 | def nfa_nonuniversality_check(nfa: dict) -> bool:
269 | """ Checks if the language read by the input NFA is different
270 | from Σ∗ (i.e. contains all possible words), returning
271 | True/False.
272 |
273 |     To test an NFA A for nonuniversality, it suffices to test Ā
274 |     (the complement automaton of A) for nonemptiness.
275 |
276 | :param dict nfa: input NFA.
277 | :return: *(bool)*, True if input nfa is nonuniversal,
278 | False otherwise.
279 | """
280 |     # NAIVE approach, very inefficient (exponential space): simply
281 |     # construct Ā and then test it for nonemptiness
282 | complemented_nfa = nfa_complementation(nfa)
283 | return DFA.dfa_nonemptiness_check(complemented_nfa)
284 |
285 | # EFFICIENT:
286 | # construct Ā “on-the-fly”: whenever the nonemptiness
287 | # algorithm wants to move from a state t_1 of Ā to a state t_2,
288 | # the algorithm guesses t_2 and checks that it is directly
289 | # connected to t_1 . Once this has been verified,
290 | # the algorithm can discard t_1 .
291 |
292 |
293 | def nfa_interestingness_check(nfa: dict) -> bool:
294 | """ Checks if the input NFA is interesting, returning True/False.
295 |
296 | An automaton is “interesting” if it defines an “interesting”
297 |     language,
298 |     i.e., a language that is neither empty nor universal (one that
299 |     contains all possible words).
300 |
301 | :param dict nfa: input NFA.
302 | :return: *(bool)*, True if the input nfa is interesting, False
303 | otherwise.
304 | """
305 | return nfa_nonemptiness_check(nfa) and nfa_nonuniversality_check(nfa)
306 |
307 |
308 | def nfa_word_acceptance(nfa: dict, word: list) -> bool:
309 |     """ Checks if a given word is accepted by an NFA.
310 |
311 |     The word w is accepted by an NFA if there exists at least one
312 |     accepting run on w.
313 |
314 | :param dict nfa: input NFA;
315 | :param list word: list of symbols ∈ nfa['alphabet'];
316 | :return: *(bool)*, True if the word is accepted, False otherwise.
317 | """
318 | current_level = set()
319 |     current_level.update(nfa['initial_states'])
320 | next_level = set()
321 | for action in word:
322 | for state in current_level:
323 | if (state, action) in nfa['transitions']:
324 | next_level.update(nfa['transitions'][state, action])
325 | if len(next_level) < 1:
326 | return False
327 | current_level = next_level
328 | next_level = set()
329 |
330 | if current_level.intersection(nfa['accepting_states']):
331 | return True
332 | else:
333 | return False
334 |
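# Illustrative sketch with a hypothetical toy_nfa dict: acceptance is
# checked level by level, keeping the set of states reachable after each
# symbol of the word.
#
#   toy_nfa = {
#       'alphabet': {'a', 'b'},
#       'states': {'q0', 'q1'},
#       'initial_states': {'q0'},
#       'accepting_states': {'q1'},
#       'transitions': {('q0', 'a'): {'q0', 'q1'}, ('q1', 'b'): {'q0'}}
#   }
#   nfa_word_acceptance(toy_nfa, ['a'])       # True:  {q0} -a-> {q0, q1}
#   nfa_word_acceptance(toy_nfa, ['a', 'b'])  # False: {q0, q1} -b-> {q0}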
335 |
336 | # SIDE EFFECTS
337 | def rename_nfa_states(nfa: dict, suffix: str):
338 |     """ Side effect on input! Renames all the states of the NFA
339 |     prepending the **suffix** string to each state name.
340 |
341 |     It is a utility function used to avoid different automata
342 |     having states with names in common.
343 |
344 |     Avoid suffixes that can lead to special names like "as", "and", ...
345 |
346 |     :param dict nfa: input NFA.
347 |     :param str suffix: string to be prepended to each state name.
348 | """
349 | conversion_dict = {}
350 | new_states = set()
351 | new_initials = set()
352 | new_accepting = set()
353 | for state in nfa['states']:
354 |         conversion_dict[state] = suffix + state
355 |         new_states.add(suffix + state)
356 |         if state in nfa['initial_states']:
357 |             new_initials.add(suffix + state)
358 |         if state in nfa['accepting_states']:
359 |             new_accepting.add(suffix + state)
360 |
361 | nfa['states'] = new_states
362 | nfa['initial_states'] = new_initials
363 | nfa['accepting_states'] = new_accepting
364 |
365 | new_transitions = {}
366 | for transition in nfa['transitions']:
367 | new_arrival = set()
368 | for arrival in nfa['transitions'][transition]:
369 | new_arrival.add(conversion_dict[arrival])
370 | new_transitions[
371 | conversion_dict[transition[0]], transition[1]] = new_arrival
372 | nfa['transitions'] = new_transitions
373 | return nfa
374 |
--------------------------------------------------------------------------------
/PySimpleAutomata/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/PySimpleAutomata/__init__.py
--------------------------------------------------------------------------------
/PySimpleAutomata/automata_IO.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to manage IO
3 | """
4 |
5 | import json
6 | import graphviz
7 | import pydot
8 | import re
9 | import os
10 |
11 |
12 | def __replace_all(repls: dict, input: str) -> str:
13 |     """ Replaces in the string **input** all the occurrences of the
14 |     symbols given as keys of the mapping **repls**.
15 |
16 |     :param dict repls: mapping where each key is the old substring and
17 |     the corresponding value is the one to substitute it with;
18 |     :param str input: original string where to apply the
19 |     replacements;
20 | :return: *(str)* the string with the desired characters replaced
21 | """
22 | return re.sub('|'.join(re.escape(key) for key in repls.keys()),
23 | lambda k: repls[k.group(0)], input)
24 |
25 |
26 | ####################################################################
27 | # DFA ##############################################################
28 |
29 | def dfa_json_importer(input_file: str) -> dict:
30 | """ Imports a DFA from a JSON file.
31 |
32 | :param str input_file: path + filename to json file;
33 | :return: *(dict)* representing a DFA.
34 | """
35 |     with open(input_file) as file:
36 |         json_file = json.load(file)
37 |
38 | transitions = {} # key [state ∈ states, action ∈ alphabet]
39 | # value [arriving state ∈ states]
40 | for (origin, action, destination) in json_file['transitions']:
41 | transitions[origin, action] = destination
42 |
43 | dfa = {
44 | 'alphabet': set(json_file['alphabet']),
45 | 'states': set(json_file['states']),
46 | 'initial_state': json_file['initial_state'],
47 | 'accepting_states': set(json_file['accepting_states']),
48 | 'transitions': transitions
49 | }
50 | return dfa
51 |
52 |
53 | def dfa_to_json(dfa: dict, name: str, path: str = './'):
54 | """ Exports a DFA to a JSON file.
55 |
56 |     If *path* does not exist, it will be created.
57 |
58 | :param dict dfa: DFA to export;
59 | :param str name: name of the output file;
60 | :param str path: path where to save the JSON file (default:
61 | working directory)
62 | """
63 | out = {
64 | 'alphabet': list(dfa['alphabet']),
65 | 'states': list(dfa['states']),
66 | 'initial_state': dfa['initial_state'],
67 | 'accepting_states': list(dfa['accepting_states']),
68 | 'transitions': list()
69 | }
70 |
71 | for t in dfa['transitions']:
72 | out['transitions'].append(
73 | [t[0], t[1], dfa['transitions'][t]])
74 |
75 | if not os.path.exists(path):
76 | os.makedirs(path)
77 | file = open(os.path.join(path, name + '.json'), 'w')
78 | json.dump(out, file, sort_keys=True, indent=4)
79 | file.close()
80 |
81 |
82 | def dfa_dot_importer(input_file: str) -> dict:
83 | """ Imports a DFA from a DOT file.
84 |
85 |     The following attributes of DOT files are recognized:
86 |
87 | • nodeX shape=doublecircle -> accepting node;
88 | • nodeX root=true -> initial node;
89 | • edgeX label="a" -> action in alphabet;
90 |     • fake [style=invisible] -> dummy invisible node pointing
91 |       to the initial state (it will be skipped);
92 |     • fake -> S [style=bold] -> dummy transition to draw the arrow
93 |       pointing to the initial state (it will be skipped).
94 |
95 | Forbidden names:
96 |
97 |     • 'fake' used for graphical purposes to draw the arrow of
98 |       the initial state;
99 | • 'sink' used as additional state when completing a DFA;
100 | • 'None' used when no initial state is present.
101 |
102 | Forbidden characters:
103 | • "
104 | • '
105 | • (
106 | • )
107 | • spaces
108 |
109 | :param str input_file: path to the DOT file;
110 | :return: *(dict)* representing a DFA.
111 | """
112 |
113 | # pyDot Object
114 | g = pydot.graph_from_dot_file(input_file)[0]
115 |
116 | states = set()
117 | initial_state = None
118 | accepting_states = set()
119 |
120 | replacements = {'"': '', "'": '', '(': '', ')': '', ' ': ''}
121 | for node in g.get_nodes():
122 | if node.get_name() == 'fake' \
123 | or node.get_name() == 'None' \
124 | or node.get_name() == 'graph' \
125 | or node.get_name() == 'node':
126 | continue
127 | if 'style' in node.get_attributes() \
128 | and node.get_attributes()['style'] == 'invisible':
129 | continue
130 | node_reference = __replace_all(replacements,
131 | node.get_name()).split(',')
132 | if len(node_reference) > 1:
133 | node_reference = tuple(node_reference)
134 | else:
135 | node_reference = node_reference[0]
136 | states.add(node_reference)
137 | for attribute in node.get_attributes():
138 | if attribute == 'root':
139 | initial_state = node_reference
140 | if attribute == 'shape' and node.get_attributes()[
141 | 'shape'] == 'doublecircle':
142 | accepting_states.add(node_reference)
143 |
144 | alphabet = set()
145 | transitions = {}
146 | for edge in g.get_edges():
147 | if edge.get_source() == 'fake':
148 | continue
149 | label = __replace_all(replacements, edge.get_label())
150 | alphabet.add(label)
151 | source = __replace_all(replacements,
152 | edge.get_source()).split(',')
153 | if len(source) > 1:
154 | source = tuple(source)
155 | else:
156 | source = source[0]
157 | destination = __replace_all(replacements,
158 | edge.get_destination()).split(',')
159 | if len(destination) > 1:
160 | destination = tuple(destination)
161 | else:
162 | destination = destination[0]
163 | transitions[source, label] = destination
164 |
165 | dfa = {
166 | 'alphabet': alphabet,
167 | 'states': states,
168 | 'initial_state': initial_state,
169 | 'accepting_states': accepting_states,
170 | 'transitions': transitions}
171 | return dfa
172 |
173 |
174 | def dfa_to_dot(dfa: dict, name: str, path: str = './', direction='TB', engine='dot'):
175 | """ Generates a DOT file and a relative SVG image in **path**
176 | folder of the input DFA using graphviz library.
177 |
178 | :param dict dfa: DFA to export;
179 | :param str name: name of the output file;
180 | :param str path: path where to save the DOT/SVG files (default:
181 | working directory);
182 | :param str direction: direction of graph (default:
183 | TB for vertical).
184 | :param str engine: Graphviz layout engine to be used (default: dot)
185 | """
186 | g = graphviz.Digraph(format='svg', engine=engine)
187 | g.graph_attr['rankdir'] = direction
188 | g.node('fake', style='invisible')
189 | for state in dfa['states']:
190 | if state == dfa['initial_state']:
191 | if state in dfa['accepting_states']:
192 | g.node(str(state), root='true',
193 | shape='doublecircle')
194 | else:
195 | g.node(str(state), root='true')
196 | elif state in dfa['accepting_states']:
197 | g.node(str(state), shape='doublecircle')
198 | else:
199 | g.node(str(state))
200 |
201 | g.edge('fake', str(dfa['initial_state']), style='bold')
202 | for transition in dfa['transitions']:
203 | g.edge(str(transition[0]),
204 | str(dfa['transitions'][transition]),
205 | label=transition[1])
206 |
207 | if not os.path.exists(path):
208 | os.makedirs(path)
209 |
210 | g.render(filename=os.path.join(path, name + '.dot'))
211 |
212 |
213 | # not yet implemented
214 | def dfa_conformance_check(dfa):
215 | """ Checks if the dfa is conformant to the specifications.
216 |
217 | :param dict dfa:
218 | :return: *(Bool)*
219 | """
220 | # check if there are just the right keys
221 | # checks all transition words are in alphabet and viceversa
222 | # checks all transition states are in states and viceversa
223 | # checks for forbidden symbols, words, names, etc
224 | # check iff one and only one initial state
225 |     raise NotImplementedError
226 |
227 |
228 | ####################################################################
229 | # NFA ##############################################################
230 |
231 | def nfa_json_importer(input_file: str) -> dict:
232 |     """ Imports an NFA from a JSON file.
233 |
234 |     :param str input_file: path+filename to JSON file;
235 |     :return: *(dict)* representing an NFA.
236 | """
237 |     with open(input_file) as file:
238 |         json_file = json.load(file)
239 |
240 | transitions = {} # key [state in states, action in alphabet]
241 | # value [Set of arriving states in states]
242 | for p in json_file['transitions']:
243 | transitions.setdefault((p[0], p[1]), set()).add(p[2])
244 |
245 | nfa = {
246 | 'alphabet': set(json_file['alphabet']),
247 | 'states': set(json_file['states']),
248 | 'initial_states': set(json_file['initial_states']),
249 | 'accepting_states': set(json_file['accepting_states']),
250 | 'transitions': transitions
251 | }
252 |
253 | return nfa
254 |
255 |
256 | def nfa_to_json(nfa: dict, name: str, path: str = './'):
257 |     """ Exports an NFA to a JSON file.
258 |
259 | :param dict nfa: NFA to export;
260 | :param str name: name of the output file;
261 | :param str path: path where to save the JSON file (default:
262 | working directory).
263 | """
264 | transitions = list() # key[state in states, action in alphabet]
265 | # value [Set of arriving states in states]
266 | for p in nfa['transitions']:
267 | for dest in nfa['transitions'][p]:
268 | transitions.append([p[0], p[1], dest])
269 |
270 | out = {
271 | 'alphabet': list(nfa['alphabet']),
272 | 'states': list(nfa['states']),
273 | 'initial_states': list(nfa['initial_states']),
274 | 'accepting_states': list(nfa['accepting_states']),
275 | 'transitions': transitions
276 | }
277 |
278 | if not os.path.exists(path):
279 | os.makedirs(path)
280 | file = open(os.path.join(path, name + '.json'), 'w')
281 | json.dump(out, file, sort_keys=True, indent=4)
282 | file.close()
283 |
284 |
285 | def nfa_dot_importer(input_file: str) -> dict:
286 |     """ Imports an NFA from a DOT file.
287 |
288 |     The following attributes of .dot files are recognized:
289 | • nodeX shape=doublecircle -> accepting node;
290 | • nodeX root=true -> initial node;
291 | • edgeX label="a" -> action in alphabet;
292 |     • fakeX style=invisible -> dummy invisible nodes pointing
293 |       to initial states (they will be skipped);
294 |     • fakeX -> S [style=bold] -> dummy transitions to draw arrows
295 |       pointing to initial states (they will be skipped).
296 |
297 | All invisible nodes are skipped.
298 |
299 | Forbidden names:
300 |     • 'fake' used for graphical purposes to draw the arrow of
301 |       the initial state
302 | • 'sink' used as additional state when completing a NFA
303 |
304 | Forbidden characters:
305 | • "
306 | • '
307 | • (
308 | • )
309 | • spaces
310 |
311 | :param str input_file: Path to input DOT file;
312 |     :return: *(dict)* representing an NFA.
313 | """
314 |
315 | # pyDot Object
316 | g = pydot.graph_from_dot_file(input_file)[0]
317 |
318 | states = set()
319 | initial_states = set()
320 | accepting_states = set()
321 |
322 | replacements = {'"': '', "'": '', '(': '', ')': '', ' ': ''}
323 |
324 | for node in g.get_nodes():
325 | attributes = node.get_attributes()
326 | if node.get_name() == 'fake' \
327 | or node.get_name() == 'None' \
328 | or node.get_name() == 'graph' \
329 | or node.get_name() == 'node':
330 | continue
331 | if 'style' in attributes \
332 | and attributes['style'] == 'invisible':
333 | continue
334 |
335 | node_reference = __replace_all(replacements,
336 | node.get_name()).split(',')
337 | if len(node_reference) > 1:
338 | node_reference = tuple(node_reference)
339 | else:
340 | node_reference = node_reference[0]
341 | states.add(node_reference)
342 | for attribute in attributes:
343 | if attribute == 'root':
344 | initial_states.add(node_reference)
345 | if attribute == 'shape' \
346 | and attributes['shape'] == 'doublecircle':
347 | accepting_states.add(node_reference)
348 |
349 | alphabet = set()
350 | transitions = {}
351 | for edge in g.get_edges():
352 | source = __replace_all(replacements,
353 | edge.get_source()).split(',')
354 | if len(source) > 1:
355 | source = tuple(source)
356 | else:
357 | source = source[0]
358 | destination = __replace_all(replacements,
359 | edge.get_destination()).split(',')
360 | if len(destination) > 1:
361 | destination = tuple(destination)
362 | else:
363 | destination = destination[0]
364 |
365 | if source not in states or destination not in states:
366 | continue
367 |
368 | label = __replace_all(replacements, edge.get_label())
369 | alphabet.add(label)
370 |
371 | transitions.setdefault((source, label), set()).add(
372 | destination)
373 |
374 | nfa = {
375 | 'alphabet': alphabet,
376 | 'states': states,
377 | 'initial_states': initial_states,
378 | 'accepting_states': accepting_states,
379 | 'transitions': transitions
380 | }
381 |
382 | return nfa
383 |
384 |
385 | def nfa_to_dot(nfa: dict, name: str, path: str = './', direction='TB', engine='dot'):
386 | """ Generates a DOT file and a relative SVG image in **path**
387 | folder of the input NFA using graphviz library.
388 |
389 | :param dict nfa: input NFA;
390 | :param str name: string with the name of the output file;
391 | :param str path: path where to save the DOT/SVG files (default:
392 | working directory);
393 | :param str direction: direction of graph (default:
394 | TB for vertical).
395 | :param str engine: Graphviz layout engine to be used (default: dot)
396 | """
397 | g = graphviz.Digraph(format='svg', engine=engine)
398 | g.graph_attr['rankdir'] = direction
399 |
400 | fakes = []
401 | for i in range(len(nfa['initial_states'])):
402 | fakes.append('fake' + str(i))
403 | g.node('fake' + str(i), style='invisible')
404 |
405 | for state in nfa['states']:
406 | if state in nfa['initial_states']:
407 | if state in nfa['accepting_states']:
408 | g.node(str(state), root='true',
409 | shape='doublecircle')
410 | else:
411 | g.node(str(state), root='true')
412 | elif state in nfa['accepting_states']:
413 | g.node(str(state), shape='doublecircle')
414 | else:
415 | g.node(str(state))
416 |
417 | for initial_state in nfa['initial_states']:
418 | g.edge(fakes.pop(), str(initial_state), style='bold')
419 | for transition in nfa['transitions']:
420 | for destination in nfa['transitions'][transition]:
421 | g.edge(str(transition[0]), str(destination),
422 | label=transition[1])
423 |
424 | g.render(filename=os.path.join(path, name + '.dot'))
425 |
426 |
427 | ####################################################################
428 | # AFW ##############################################################
429 |
430 | def afw_json_importer(input_file: str) -> dict:
431 |     """ Imports an AFW from a JSON file.
432 |
433 |     :param str input_file: path+filename to input JSON file;
434 |     :return: *(dict)* representing an AFW.
435 | """
436 |     with open(input_file) as file:
437 |         json_file = json.load(file)
438 |
439 | transitions = {} # key [state in states, action in alphabet]
440 | # value [string representing boolean expression]
441 | for p in json_file['transitions']:
442 | transitions[p[0], p[1]] = p[2]
443 |
444 | # return map
445 | afw = {
446 | 'alphabet': set(json_file['alphabet']),
447 | 'states': set(json_file['states']),
448 | 'initial_state': json_file['initial_state'],
449 | 'accepting_states': set(json_file['accepting_states']),
450 | 'transitions': transitions
451 | }
452 | return afw
453 |
454 |
455 | def afw_to_json(afw: dict, name: str, path: str = './'):
456 |     """ Exports an AFW to a JSON file.
457 |
458 | :param dict afw: input AFW;
459 | :param str name: output file name;
460 | :param str path: path where to save the JSON file (default:
461 | working directory).
462 | """
463 |
464 | out = {
465 | 'alphabet': list(afw['alphabet']),
466 | 'states': list(afw['states']),
467 | 'initial_state': afw['initial_state'],
468 | 'accepting_states': list(afw['accepting_states']),
469 | 'transitions': list()
470 | }
471 |
472 | for t in afw['transitions']:
473 | out['transitions'].append(
474 | [t[0], t[1], afw['transitions'][t]])
475 |
476 | if not os.path.exists(path):
477 | os.makedirs(path)
478 | file = open(os.path.join(path, name + '.json'), 'w')
479 | json.dump(out, file, sort_keys=True, indent=4)
480 | file.close()
481 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ================
2 | PySimpleAutomata
3 | ================
4 |
5 | .. Overview
6 |
7 | PySimpleAutomata is a Python library to manage Deterministic Finite Automata (DFA),
8 | Nondeterministic Finite Automata (NFA) and Alternating Finite state automata on Words (AFW).
9 |
10 | This library is not meant for performance or space-consumption optimization,
11 | but for academic purposes:
12 | *PySimpleAutomata aims to be an easily readable but working representation of automata theory*.
13 |
14 | .. Disclaimer
15 |
16 | This project has been developed for "Process and Service Modelling and Analysis" class
17 | of Master of Science in Engineering in Computer Science from Sapienza University of Rome.
18 |
19 | ---------
20 | Structure
21 | ---------
22 |
23 | ::
24 |
25 | /PySimpleAutomata
26 | |
27 | +---/PySimpleAutomata : Library sources
28 | | |
29 | | +--- AFW.py : Functions to handle AFW automata.
30 | | |
31 | | +--- automata_IO.py : Functions of IN/OUTput for all the automata types.
32 | | |
33 | | +--- DFA.py : Functions to handle DFA automata.
34 | | |
35 | | +--- NFA.py : Functions to handle NFA automata.
36 | |
37 | +---/doc : Documentation sources
38 | |
39 | +---/tests : Unit testing and test-cases inputs
40 |
41 |
42 | ------------
43 | Requirements
44 | ------------
45 |
46 | The project is **Python3 only**, tested on Python 3.5 and 3.6.
47 |
48 | `Graphviz - Graph Visualization Software `_ is required to be installed and
49 | present on the system path to input/output DOT files, while the
50 | Python packages `pydot `_ and
51 | `graphviz `_ are used to handle them (for input and output respectively).
52 |
53 | `Sphinx `_ is used to generate the documentation.
54 |
55 | `Unittest `_ for Unit testing.
56 |
57 |
58 | ------------
59 | Installation
60 | ------------
61 |
62 | From `PyPi `_ using pip::
63 |
64 | pip install pysimpleautomata
65 |
66 | From source::
67 |
68 | python setup.py install
69 | pip install -r requirements.txt
70 |
71 | It is advised in any case to use a `Python Virtual environment `_ instead of a global installation.
72 |
73 | -------------
74 | Documentation
75 | -------------
76 |
77 | For a detailed explanation of the library API consult the `online documentation `_
78 | or `download it `_.
79 |
80 | -------
81 | Licence
82 | -------
83 |
84 | This code is provided under `MIT Licence `_.
85 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = PySimpleAutomata
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/doc/README.rst:
--------------------------------------------------------------------------------
1 | PySimpleAutomata Documentation
2 | ==============================
3 |
4 | Here reside the documentation sources, which are mainly auto-generated
5 | from the library source code.
6 |
7 | To consult the documentation follow this link ``_;
8 | it is provided through the readthedocs.org service.
9 |
10 |
11 |
--------------------------------------------------------------------------------
/doc/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 | set SPHINXPROJ=PySimpleAutomata
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/doc/source/AFW.rst:
--------------------------------------------------------------------------------
1 | AFW
2 | ===
3 |
4 | .. automodule:: PySimpleAutomata.AFW
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | formula_dual
14 | afw_complementation
15 | afw_completion
16 | afw_intersection
17 | afw_nonemptiness_check
18 | afw_nonuniversality_check
19 | afw_to_nfa_conversion
20 | afw_union
21 | nfa_to_afw_conversion
22 | rename_afw_states
23 | afw_word_acceptance
24 |
25 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/DFA.rst:
--------------------------------------------------------------------------------
1 | DFA
2 | ===
3 |
4 | .. automodule:: PySimpleAutomata.DFA
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | dfa_co_reachable
14 | dfa_complementation
15 | dfa_completion
16 | dfa_intersection
17 | dfa_minimization
18 | dfa_nonemptiness_check
19 | dfa_projection
20 | dfa_reachable
21 | dfa_trimming
22 | dfa_union
23 | dfa_word_acceptance
24 | rename_dfa_states
25 |
26 | .. rubric:: Functions
27 |
--------------------------------------------------------------------------------
/doc/source/IO.rst:
--------------------------------------------------------------------------------
1 | I/O
2 | ---
3 |
4 | IO is managed by :mod:`PySimpleAutomata.automata_IO` module::
5 |
6 | from PySimpleAutomata import automata_IO
7 |
8 | DOT and JSON files are supported for input and output.
9 | AFWs use only JSON because alternating automata don't have a "natural"
10 | graph representation.
11 |
12 | The next sections explain the format specifications used by the various automata.
13 |
14 | .. warning::
15 |
16 |     Favouring readability over functionality, the library doesn't handle exceptions explicitly.
17 |     Therefore the library will not give any hint about possible errors,
18 |     but will just raise the relevant low-level Python exception.
19 |     Pay attention to providing correct automata in input, as stated in this documentation.
20 |
21 |
22 |
23 | ***
24 | DOT
25 | ***
26 |
27 | **DOT** is a plain text graph description language.
28 | This format is recommended for DFAs and NFAs because it natively permits
29 | both a graphical representation and an easy-to-read plain text.
30 |
31 | Here are covered the aspects of the DOT format needed by this library;
32 | for a complete understanding see the `Graphviz documentation `_.
33 | **Different usages of DOT may lead to unexpected results and behaviours, so adhere to the rules stated in this documentation**.
34 |
35 | DOT files are managed in input through the `Pydot `_ package
36 | because of its file handling flexibility,
37 | while `graphviz `_ is used for
38 | output because it returns a cleaner file, without useless metadata.
39 |
40 | --------------------------------------------------------------------
41 |
42 | The automaton definition is wrapped in a *digraph* statement (i.e. a directed graph)
43 | ::
44 |
45 | digraph{ ... }
46 |
47 | --------------------------------------------------------------------
48 |
49 | **States** are graph nodes that are represented simply by a string and just need to be listed.
50 | Follow the `DOT specification `_
51 | for name restrictions.
52 | ::
53 |
54 | digraph{
55 | s1
56 | s2
57 | s3
58 | s4
59 | }
60 |
61 | |IMG_nodes|
62 |
63 | .. Note::
64 |
65 |     The following names are prohibited, as they are used by PySimpleAutomata for
66 |     specific tasks (covered in the following sections):
67 |
68 | - fake
69 | - fakeN (where N is a number)
70 | - sink
71 | - None
72 |
73 | --------------------------------------------------------------------
74 |
75 | **Root nodes** are identified by the attribute *root=true*
76 | ::
77 |
78 | s0 [root=true]
79 |
80 | For graphical purposes each root has an entering bold arrow.
81 | In order to draw it, a dummy node *fake* with attribute *style=invisible*
82 | is created and linked to the root with attribute *style=bold*.
83 |
84 | This node and transition are ignored when importing and created when exporting.
85 |
86 | ::
87 |
88 | fake [style=invisible]
89 | fake -> s0 [style=bold]
90 |
91 | s0 [root=true]
92 |
93 | |IMG_root_node|
94 |
95 | --------------------------------------------------------------------
96 |
97 | **Accepting nodes** are identified by the attribute *shape=doublecircle*
98 | ::
99 |
100 | t4 [shape=doublecircle]
101 |
102 | |IMG_accepting_node|
103 |
104 | --------------------------------------------------------------------
105 |
106 | **Transitions** between nodes are represented by a directed arrow and an attribute *label*
107 | ::
108 |
109 | s0 -> s1 [label="5c"]
110 |
111 | |IMG_transition|
112 |
113 | Always double quote labels.
114 |
115 | DFA
116 | ***
117 |
118 | Example::
119 |
120 | digraph{
121 | fake [style=invisible]
122 | fake -> s0 [style=bold]
123 |
124 | s0 [root=true, shape=doublecircle]
125 |
126 | s1
127 | s2 [shape=doublecircle]
128 | s3
129 | s4
130 |
131 | s0 -> s1 [label="5c"]
132 | s0 -> s4 [label="10c"]
133 | s1 -> s2 [label="5c"]
134 | s1 -> s3 [label="10c"]
135 | s2 -> s3 [label="5c"]
136 | s2 -> s3 [label="10c"]
137 | s3 -> s0 [label="gum"]
138 | s4 -> s3 [label="5c"]
139 | s4 -> s3 [label="10c"]
140 | }
141 |
142 | Result: |IMG_dfa_example|
143 |
144 | DFAs have just one root and each node has at most one outgoing transition
145 | for each label.
146 |
147 | *fake* node is reserved to draw the bold arrow pointing to the root.
148 |
149 | *sink* node is reserved for DFA completion.
150 |
151 | *None* node, named after the homonymous
152 | `Python Built-in Constant `_,
153 | is reserved in case a DFA has no root (i.e. empty DFAs).
154 | It is ignored in input.
155 |
156 | **Attention! No conformance checking**
157 |
158 | INput function :mod:`~PySimpleAutomata.automata_IO.dfa_dot_importer`
159 |
160 | OUTput function :mod:`PySimpleAutomata.automata_IO.dfa_to_dot`
161 |
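A minimal usage sketch (assuming the example above is saved as
``dfa_example.dot`` in the working directory)::

    from PySimpleAutomata import automata_IO

    dfa = automata_IO.dfa_dot_importer('dfa_example.dot')
    # dfa is a dict with 'alphabet', 'states', 'initial_state',
    # 'accepting_states' and 'transitions' keys
    automata_IO.dfa_to_dot(dfa, 'dfa_example_out', './')
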
162 | NFA
163 | ***
164 |
165 | Example::
166 |
167 | digraph{
168 | fake [style=invisible]
169 | t0 [root=true, shape=doublecircle]
170 |
171 | fake -> t0 [style=bold]
172 |
173 | foo [style=invisible]
174 | a0 [root=true, shape=doublecircle]
175 |
176 | foo -> a0 [style=bold]
177 |
178 | t1
179 | t2
180 | t3
181 | t4 [shape=doublecircle]
182 |
183 | a0 -> t1 [label="a"]
184 | t0 -> t1 [label="b"]
185 | t0 -> t2 [label="a"]
186 | t1 -> t2 [label="c"]
187 | t1 -> t3 [label="c"]
188 | t1 -> t4 [label="b"]
189 | t2 -> t4 [label="a"]
190 | t2 -> t2 [label="a"]
191 | t2 -> t1 [label="b"]
192 | t3 -> t3 [label="b"]
193 | t3 -> t1 [label="a"]
194 | t3 -> t4 [label="a"]
195 | t3 -> t0 [label="b"]
196 | t3 -> t0 [label="c"]
197 | t4 -> t0 [label="c"]
198 | t4 -> t0 [label="b"]
199 | t4 -> t4 [label="a"]
200 | }
201 |
202 | Result: |IMG_nfa_example|
203 |
204 | NFAs have at least one root and each node may have more than one outgoing transition
205 | with the same label.
206 |
207 | All nodes labelled with *style=invisible* and their related transitions are skipped,
208 | as they are used to draw the root arrows.
209 |
210 | *fakeN* (where N is a number) nodes are reserved for output purposes,
211 | to draw the bold arrows pointing to the roots.
212 |
213 | *sink* node is reserved for NFA completion.
214 |
215 | **Attention! No conformance checking**
216 |
217 | INput function :mod:`PySimpleAutomata.automata_IO.nfa_dot_importer`
218 |
219 | OUTput function :mod:`PySimpleAutomata.automata_IO.nfa_to_dot`
220 |
221 |
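A similar round trip for NFAs (assuming the example above is saved as
``nfa_example.dot`` in the working directory)::

    from PySimpleAutomata import automata_IO

    nfa = automata_IO.nfa_dot_importer('nfa_example.dot')
    automata_IO.nfa_to_dot(nfa, 'nfa_example_out', './')
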
222 | .. original dim 978x724
223 | .. |IMG_dfa_example| image:: /_static/dfa_example.png
224 | :height: 489
225 | :width: 724
226 |
227 | .. original dim 978x724
228 | .. |IMG_nfa_example| image:: /_static/nfa_example.png
229 | :height: 489
230 | :width: 724
231 |
232 | .. |IMG_nodes| image:: /_static/nodes.png
233 |
234 | .. |IMG_root_node| image:: /_static/root_node.png
235 | :height: 150
236 | :width: 100
237 |
238 | .. |IMG_accepting_node| image:: /_static/accepting_node.png
239 | :height: 100
240 | :width: 100
241 |
242 | .. |IMG_transition| image:: /_static/transition.png
243 | :height: 200
244 | :width: 100
245 |
246 |
247 | ****
248 | JSON
249 | ****
250 |
251 | `JSON (JavaScript Object Notation) `_ is a
252 | lightweight data-interchange format.
253 | The JSON resembles almost 1:1 the structure of the automata
254 | used in the code and indeed permits a more straightforward data IN/OUT
255 | while still being human readable.
256 |
257 | To have a graphical representation of DFAs and NFAs use DOT format.
258 |
259 | The general JSON structure for automata is the following::
260 |
261 | {
262 | "alphabet": ["a1", "a2", ... , "aN"],
263 | "states": ["s1", "s2", ... , "sK"],
264 | "initial_states": ["sX", ... , "sY"],
265 | "accepting_states": ["sA", ..., "sB"],
266 | "transitions": [
267 | ["from", "action", "to"],
268 | ...,
269 | ["from_Z", "action_Z", "to_Z"]
270 | ]
271 | }
272 |
273 | DFA
274 | ***
275 |
276 | Example::
277 |
278 | {
279 | "alphabet": [
280 | "5c",
281 | "10c",
282 | "gum"
283 | ],
284 | "states": [
285 | "s0",
286 | "s1",
287 | "s2",
288 | "s3",
289 | "s4"
290 | ],
291 | "initial_state": "s0",
292 | "accepting_states": [
293 | "s0",
294 | "s2"
295 | ],
296 | "transitions": [
297 | ["s0","5c","s1"],
298 | ["s0","10c","s4"],
299 | ["s1","5c","s2"],
300 | ["s1","10c","s3"],
301 | ["s2","5c","s3"],
302 | ["s2","10c","s3"],
303 | ["s4","5c","s3"],
304 | ["s4","10c","s3"],
305 | ["s3","gum","s0"]
306 | ]
307 | }
308 |
309 |
310 | |IMG_dfa_example|
311 |
312 | Where:
313 | - "alphabet": list of all the actions possible in the automaton,
314 | represented as strings;
315 | - "states": list of all the states of the automaton,
316 | represented as strings;
317 | - "initial_state": string identifying the root node, present in "states";
318 | - "accepting_states": list of accepting states, subset of "states";
319 | - "transitions": list of triples (list), to read
320 | ["from-this-state","performing-this-action","move-to-this-state"],
321 | where "from" and "to" ∈ "states" and "action" ∈ "alphabet"
322 |
323 | **Attention! no conformance checking**
324 |
325 | INput function :mod:`PySimpleAutomata.automata_IO.dfa_json_importer`
326 |
327 | OUTput function :mod:`PySimpleAutomata.automata_IO.dfa_to_json`
328 |
329 |
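For instance (assuming the JSON above is saved as ``dfa_example.json``
in the working directory)::

    from PySimpleAutomata import automata_IO

    dfa = automata_IO.dfa_json_importer('dfa_example.json')
    automata_IO.dfa_to_json(dfa, 'dfa_example_copy', './')
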
330 | NFA
331 | ***
332 |
333 | Example::
334 |
335 | {
336 | "alphabet": [
337 | "a",
338 | "b",
339 | "c"
340 | ],
341 | "states": [
342 | "a0",
343 | "t0",
344 | "t1",
345 | "t2",
346 | "t3",
347 | "t4"
348 | ],
349 | "initial_states": [
350 | "t0",
351 | "a0"
352 | ],
353 | "accepting_states": [
354 | "t0",
355 | "t4",
356 | "a0"
357 | ],
358 | "transitions": [
359 | ["t0","b","t1"],
360 | ["t0","a","t2"],
361 | ["t1","c","t3"],
362 | ["t1","c","t2"],
363 | ["t1","b","t4"],
364 | ["t2","b","t1"],
365 | ["t2","a","t2"],
366 | ["t2","a","t4"],
367 | ["t3","c","t0"],
368 | ["t3","b","t0"],
369 | ["t3","b","t3"],
370 | ["t3","a","t4"],
371 | ["t3","a","t1"],
372 | ["t4","a","t4"],
373 | ["t4","b","t0"],
374 | ["t4","c","t0"],
375 | ["a0","a","t1"]
376 | ]
377 | }
378 |
379 | |IMG_nfa_example|
380 |
381 | Where:
382 | - "alphabet": list of all the actions possible in the automaton,
383 | represented as strings;
384 | - "states": list of all the states of the automaton,
385 | represented as strings;
386 | - "initial_states": list of root nodes, subset of "states";
387 | - "accepting_states": list of accepting states, subset of "states";
388 | - "transitions": list of triples (list), to read
389 | ["from-this-state","performing-this-action","move-to-this-state"],
390 | where "from" and "to" ∈ "states" and "action" ∈ "alphabet"
391 |
392 | **Attention! no conformance checking**
393 |
394 | INput function :mod:`PySimpleAutomata.automata_IO.nfa_json_importer`
395 |
396 | OUTput function :mod:`PySimpleAutomata.automata_IO.nfa_to_json`
397 |
398 |
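For instance (assuming the JSON above is saved as ``nfa_example.json``
in the working directory)::

    from PySimpleAutomata import automata_IO

    nfa = automata_IO.nfa_json_importer('nfa_example.json')
    # transitions are grouped by (state, action) into sets of destinations,
    # e.g. nfa['transitions']['t2', 'a'] == {'t2', 't4'}
    automata_IO.nfa_to_json(nfa, 'nfa_example_copy', './')
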
399 | AFW
400 | ***
401 |
402 | Example::
403 |
404 | {
405 | "alphabet": [
406 | "a",
407 | "b"
408 | ],
409 | "states": [
410 | "s",
411 | "q0",
412 | "q1",
413 | "q2"
414 | ],
415 | "initial_state": "s",
416 | "accepting_states": [
417 | "q0",
418 | "s"
419 | ],
420 | "transitions": [
421 | ["q0", "b", "q0 or q2"],
422 | ["q0", "a", "q1"],
423 | ["q1", "a", "q0"],
424 | ["q1", "b", "q1 or q2"],
425 | ["q2", "a", "q2"],
426 | ["s", "b", "s and q0"],
427 | ["s", "a", "s"]
428 | ]
429 | }
430 |
431 | Where:
432 | - "alphabet": list of all the actions possible in the automaton,
433 | represented as strings;
434 | - "states": list of all the states of the automaton,
435 | represented as strings;
436 | - "initial_state": string identifying the root node, present in "states";
437 | - "accepting_states": list of accepting states, subset of "states";
438 | - "transitions": list of triples (list), to read
439 | ["from-this-state",
440 | "performing-this-action",
441 | "move-to-a-state-where-this-formula-holds"],
442 | where "from" ∈ "states", "action" ∈ "alphabet".
443 |
444 | The third element of the transition triple is a string representing
445 | a Python formula (that will be evaluated as a Boolean), where all the elements
446 | are ∈ "states" and only the {'and', 'or', 'True', 'False'} operators are
447 | permitted. The use of parentheses is encouraged to avoid naive errors of
448 | operator evaluation order.
449 |
450 | **Attention! no conformance checking**
451 |
452 | INput function :mod:`PySimpleAutomata.automata_IO.afw_json_importer`
453 |
454 | OUTput function :mod:`PySimpleAutomata.automata_IO.afw_to_json`
455 |
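A sketch of how such a formula can be read and evaluated against a truth
assignment of the states (only an illustration of the format, not
necessarily the library's internal mechanism; ``afw_example.json`` is a
placeholder file name)::

    from PySimpleAutomata import automata_IO

    afw = automata_IO.afw_json_importer('afw_example.json')
    formula = afw['transitions']['q0', 'b']          # 'q0 or q2'
    assignment = {'q0': 'True', 'q1': 'False', 'q2': 'False', 's': 'False'}
    value = eval(' '.join(assignment.get(tok, tok) for tok in formula.split()))
    # value is True: q0 holds in the assignment, so 'q0 or q2' holds
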
--------------------------------------------------------------------------------
/doc/source/NFA.rst:
--------------------------------------------------------------------------------
1 | NFA
2 | ===
3 |
4 | .. automodule:: PySimpleAutomata.NFA
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | nfa_complementation
14 | nfa_determinization
15 | nfa_interestingness_check
16 | nfa_intersection
17 | nfa_nonemptiness_check
18 | nfa_nonuniversality_check
19 | nfa_union
20 | nfa_word_acceptance
21 | rename_nfa_states
22 |
23 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/_static/accepting_node.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/accepting_node.png
--------------------------------------------------------------------------------
/doc/source/_static/dfa_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/dfa_example.png
--------------------------------------------------------------------------------
/doc/source/_static/nfa_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/nfa_example.png
--------------------------------------------------------------------------------
/doc/source/_static/nodes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/nodes.png
--------------------------------------------------------------------------------
/doc/source/_static/root_node.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/root_node.png
--------------------------------------------------------------------------------
/doc/source/_static/transition.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/doc/source/_static/transition.png
--------------------------------------------------------------------------------
/doc/source/automata_IO.rst:
--------------------------------------------------------------------------------
1 | automata_IO
2 | ===========
3 |
4 | .. automodule:: PySimpleAutomata.automata_IO
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | dfa_json_importer
14 | dfa_to_json
15 | dfa_dot_importer
16 | dfa_to_dot
17 | dfa_conformance_check
18 | nfa_json_importer
19 | nfa_to_json
20 | nfa_dot_importer
21 | nfa_to_dot
22 | afw_json_importer
23 | afw_to_json
24 |
25 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/automata_representation.rst:
--------------------------------------------------------------------------------
1 | Automata representation
2 | -----------------------
3 |
4 | For transparency, the automata are represented with Built-in Types
5 | instead of an object structure.
6 | More precisely, an automaton is a `dict `_
7 | with the following keys:
8 |
9 | - 'alphabet',
10 | - 'states',
11 | - 'initial_state(s)',
12 | - 'accepting_states',
13 | - 'transitions'
14 |
15 | The value mapped to each key varies depending on the specific automaton type (mainly
16 | `sets `_).
17 | See :doc:`DFA`, :doc:`NFA`, :doc:`AFW` for specifications.
18 |
19 | .. note::
20 |
21 |     Since the automaton is not a fixed object, it is up to the user to ensure the correctness of the data.
22 |
23 | DFA Example::
24 |
25 | dfa_example = {
26 | 'alphabet': {'5c', '10c', 'gum'},
27 | 'states': {'s1', 's0', 's2', 's3'},
28 | 'initial_state': 's0',
29 | 'accepting_states': {'s0'},
30 | 'transitions': {
31 | ('s1', '5c'): 's2',
32 | ('s0', '5c'): 's1',
33 | ('s2', '10c'): 's3',
34 | ('s3', 'gum'): 's0',
35 | ('s2', '5c'): 's3',
36 | ('s0', '10c'): 's2',
37 | ('s1', '10c'): 's3'
38 | }
39 | }
40 |
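Such a dictionary can be built by hand and passed directly to the library
functions, e.g. (a minimal sketch, assuming the usual ``(dfa, word)``
signature of ``DFA.dfa_word_acceptance``)::

    from PySimpleAutomata import DFA

    DFA.dfa_word_acceptance(dfa_example, ['5c', '5c', '10c', 'gum'])  # True
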
41 |
--------------------------------------------------------------------------------
/doc/source/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | #
4 | # PySimpleAutomata documentation build configuration file,
5 | # created by
6 | # sphinx-quickstart on Mon Feb 13 18:47:18 2017.
7 | #
8 | # This file is execfile()d with the current directory set to its
9 | # containing dir.
10 | #
11 | # Note that not all possible configuration values are present in
12 | # this
13 | # autogenerated file.
14 | #
15 | # All configuration values have a default; values that are
16 | # commented out
17 | # serve to show the default.
18 |
19 | # If extensions (or modules to document with autodoc) are in
20 | # another directory,
21 | # add these directories to sys.path here. If the directory is
22 | # relative to the
23 | # documentation root, use os.path.abspath to make it absolute,
24 | # like shown here.
25 | #
26 | import os
27 | import sys
28 |
29 | sys.path.insert(0, os.path.abspath('./../../'))
30 |
31 | # -- General configuration
32 | # ------------------------------------------------
33 |
34 | # If your documentation needs a minimal Sphinx version, state it
35 | # here.
36 | #
37 | # needs_sphinx = '1.0'
38 |
39 | # Add any Sphinx extension module names here, as strings. They
40 | # can be
41 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your
42 | # custom
43 | # ones.
44 | extensions = ['sphinx.ext.autodoc',
45 | 'sphinx.ext.doctest',
46 | 'sphinx.ext.mathjax',
47 | 'sphinx.ext.viewcode',
48 | 'sphinx.ext.autosummary',
49 | 'sphinx.ext.githubpages']
50 |
51 | autosummary_generate = True
52 |
53 | # Add any paths that contain templates here, relative to this
54 | # directory.
55 | templates_path = ['_templates']
56 |
57 | # The suffix(es) of source filenames.
58 | # You can specify multiple suffix as a list of string:
59 | #
60 | # source_suffix = ['.rst', '.md']
61 | source_suffix = '.rst'
62 |
63 | # The master toctree document.
64 | master_doc = 'index'
65 |
66 | # General information about the project.
67 | project = 'PySimpleAutomata'
68 | copyright = '2017, Alessio Cecconi'
69 | author = 'Alessio Cecconi'
70 |
71 | # The version info for the project you're documenting, acts as
72 | # replacement for
73 | # |version| and |release|, also used in various other places
74 | # throughout the
75 | # built documents.
76 | #
77 | # The short X.Y version.
78 | version = '0.5.0'
79 | # The full version, including alpha/beta/rc tags.
80 | release = '0.5.0'
81 |
82 | # The language for content autogenerated by Sphinx. Refer to
83 | # documentation
84 | # for a list of supported languages.
85 | #
86 | # This is also used if you do content translation via gettext
87 | # catalogs.
88 | # Usually you set "language" from the command line for these cases.
89 | language = None
90 |
91 | # List of patterns, relative to source directory, that match
92 | # files and
93 | # directories to ignore when looking for source files.
94 | # This patterns also effect to html_static_path and html_extra_path
95 | exclude_patterns = []
96 |
97 | # The name of the Pygments (syntax highlighting) style to use.
98 | pygments_style = 'sphinx'
99 |
100 | # If true, `todo` and `todoList` produce output, else they
101 | # produce nothing.
102 | todo_include_todos = False
103 |
104 | # -- Options for HTML output
105 | # ----------------------------------------------
106 |
107 | # The theme to use for HTML and HTML Help pages. See the
108 | # documentation for
109 | # a list of builtin themes.
110 | #
111 | html_theme = 'alabaster'
112 | # html_theme = 'classic'
113 |
114 | # Theme options are theme-specific and customize the look and
115 | # feel of a theme
116 | # further. For a list of options available for each theme, see the
117 | # documentation.
118 | #
119 | # ALABASTER OPTIONS
120 | html_sidebars = {
121 | '**': [
122 | 'about.html',
123 | 'navigation.html',
124 | 'relations.html',
125 | 'searchbox.html',
126 | 'donate.html',
127 | ]
128 | }
129 | html_theme_options = {
130 | 'description': 'Library to manage DFA, NFA and AFW automata',
131 | 'github_user': 'oneiroe',
132 | 'github_repo': 'PySimpleAutomata',
133 | 'fixed_sidebar': True,
134 | 'sidebar_collapse': False
135 | }
136 |
137 | # Add any paths that contain custom static files (such as style
138 | # sheets) here,
139 | # relative to this directory. They are copied after the builtin
140 | # static files,
141 | # so a file named "default.css" will overwrite the builtin
142 | # "default.css".
143 | html_static_path = ['_static']
144 |
145 | # -- Options for HTMLHelp output
146 | # ------------------------------------------
147 |
148 | # Output file base name for HTML help builder.
149 | htmlhelp_basename = 'PySimpleAutomatadoc'
150 |
151 | # -- Options for LaTeX output
152 | # ---------------------------------------------
153 |
154 | latex_elements = {
155 | # The paper size ('letterpaper' or 'a4paper').
156 | #
157 | # 'papersize': 'letterpaper',
158 |
159 | # The font size ('10pt', '11pt' or '12pt').
160 | #
161 | # 'pointsize': '10pt',
162 |
163 | # Additional stuff for the LaTeX preamble.
164 | #
165 | # 'preamble': '',
166 |
167 | # Latex figure (float) alignment
168 | #
169 | # 'figure_align': 'htbp',
170 | }
171 |
172 | # Grouping the document tree into LaTeX files. List of tuples
173 | # (source start file, target name, title,
174 | # author, documentclass [howto, manual, or own class]).
175 | latex_documents = [
176 | (master_doc, 'PySimpleAutomata.tex',
177 | 'PySimpleAutomata Documentation',
178 | 'Alessio Cecconi', 'manual'),
179 | ]
180 |
181 | # -- Options for manual page output
182 | # ---------------------------------------
183 |
184 | # One entry per manual page. List of tuples
185 | # (source start file, name, description, authors, manual section).
186 | man_pages = [
187 | (master_doc, 'pysimpleautomata',
188 | 'PySimpleAutomata Documentation',
189 | [author], 1)
190 | ]
191 |
192 | # -- Options for Texinfo output
193 | # -------------------------------------------
194 |
195 | # Grouping the document tree into Texinfo files. List of tuples
196 | # (source start file, target name, title, author,
197 | # dir menu entry, description, category)
198 | texinfo_documents = [
199 | (master_doc, 'PySimpleAutomata',
200 | 'PySimpleAutomata Documentation',
201 | author, 'PySimpleAutomata', 'One line description of project.',
202 | 'Miscellaneous'),
203 | ]
204 |
--------------------------------------------------------------------------------
/doc/source/index.rst:
--------------------------------------------------------------------------------
1 | .. PySimpleAutomata documentation master file, created by
2 | sphinx-quickstart on Mon Feb 13 18:47:18 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to PySimpleAutomata's documentation!
7 | ============================================
8 |
9 | PySimpleAutomata
10 | ________________
11 |
12 | .. Overview
13 |
14 | PySimpleAutomata is a Python library to manage Deterministic Finite Automata (DFA),
15 | Nondeterministic Finite Automata (NFA) and Alternating Finite state automata on Words (AFW).
16 |
17 | This library is not meant for performance or space-consumption optimization,
18 | but for academic purposes:
19 | *PySimpleAutomata aims to be an easily readable but working representation of automata theory*.
20 |
21 | .. Disclaimer
22 |
23 | This project has been developed for "Process and Service Modelling and Analysis" class
24 | of Master of Science in Engineering in Computer Science from Sapienza University of Rome.
25 |
26 | .. Contacts
27 |
28 | .. Report any bug through github issue section
29 |
30 | .. References
31 |
32 | .. Download
33 |
34 | Offline versions of this documentation can be found `here `_
35 |
36 | ------------
37 | Installation
38 | ------------
39 |
40 | The project is **Python3 only**, tested on Python 3.5 and 3.6.
41 |
42 | `Graphviz - Graph Visualization Software `_
43 | is required to be installed and present on system path to input/output DOT files.
44 |
45 | From `PyPi `_ using pip::
46 |
47 | pip install pysimpleautomata
48 |
49 | From source::
50 |
51 | python setup.py install
52 | pip install -r requirements.txt
53 |
54 | It is advised in any case to use a `Python Virtual environment `_ instead of a global installation.
55 |
56 | -------
57 | Licence
58 | -------
59 |
60 | This code is provided under `MIT Licence `_.
61 |
62 |
63 | Contents
64 | ========
65 |
66 | .. toctree::
67 | :maxdepth: -1
68 | :includehidden:
69 |
70 | tutorial
71 | modules
72 |
73 | Indices and tables
74 | ==================
75 |
76 | * :ref:`Complete Index `
77 | * :ref:`modindex`
78 | * :ref:`search`
79 |
--------------------------------------------------------------------------------
/doc/source/installation.rst:
--------------------------------------------------------------------------------
1 | Requirements
2 | ------------
3 |
4 | The project is **Python3 only**, tested on Python 3.5 and 3.6.
5 |
6 | `Graphviz - Graph Visualization Software `_ is required to be installed and
7 | present on system path to input/output DOT files.
8 |
9 | Relevant Python packages (included in the installation):
10 | - `pydot `_ for DOT import;
11 | - `graphviz `_ for DOT export;
12 | - `Sphinx `_ for documentation generation;
13 | - `Unittest `_ for Unit testing.
14 |
15 |
16 | Installation
17 | ------------
18 |
19 | From `PyPi `_ using pip::
20 |
21 | pip install pysimpleautomata
22 |
23 | From source::
24 |
25 | python setup.py install
26 | pip install -r requirements.txt
27 |
28 | It is advised in any case to use a `Python Virtual environment `_ instead of a global installation.
29 |
30 |
--------------------------------------------------------------------------------
/doc/source/modules.rst:
--------------------------------------------------------------------------------
1 | Modules
2 | =======
3 |
4 | .. toctree::
5 | :maxdepth: -1
6 |
7 | DFA
8 | NFA
9 | AFW
10 | automata_IO
11 | unittest
12 |
13 |
--------------------------------------------------------------------------------
/doc/source/test_AFW.rst:
--------------------------------------------------------------------------------
1 | Tests AFW
2 | =========
3 |
4 | .. automodule:: tests.test_AFW
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | TestAfwWordAcceptance
14 | TestNfaToAfwConversion
15 | TestAfwToNfaConversion
16 | TestAfwCompletion
17 | TestAfwComplementation
18 | TestAfwUnion
19 | TestAfwIntersection
20 | TestAfwNonemptinessCheck
21 | TestAfwNonuniversalityCheck
22 |
23 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/test_DFA.rst:
--------------------------------------------------------------------------------
1 | Tests DFA
2 | =========
3 |
4 | .. automodule:: tests.test_DFA
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | TestDfaWordAcceptance
14 | TestDfaCompletion
15 | TestDfaComplementation
16 | TestDfaIntersection
17 | TestDfaUnion
18 | TestDfaMinimization
19 | TestDfaReachable
20 | TestDfaCoReachable
21 | TestDfaTrimming
22 | TestDfaProjection
23 | TestDfaNonemptinessCheck
24 |
25 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/test_NFA.rst:
--------------------------------------------------------------------------------
1 | Tests NFA
2 | =========
3 |
4 | .. automodule:: tests.test_NFA
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | TestNfaIntersection
14 | TestNfaUnion
15 | TestNfaDeterminization
16 | TestNfaComplementation
17 | TestNfaNonemptinessCheck
18 | TestNfaNonuniversalityCheck
19 | TestNfaInterestingnessCheck
20 | TestNfaWordAcceptance
21 |
22 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/test_automata_IO.rst:
--------------------------------------------------------------------------------
1 | Tests automata_IO
2 | =================
3 |
4 | .. automodule:: tests.test_automata_IO
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: List
10 |
11 | .. autosummary::
12 |
13 | TestDfaDotImporter
14 | TestDfaToDot
15 | TestDfaJsonImporter
16 | TestDfaToJson
17 | TestNfaDotImporter
18 | TestNfaToDot
19 | TestNfaJsonImporter
20 | TestNfaToJson
21 | TestAfwJsonImporter
22 | TestAfwToJson
23 |
24 | .. rubric:: Functions
--------------------------------------------------------------------------------
/doc/source/tutorial.rst:
--------------------------------------------------------------------------------
1 | Tutorial
2 | ========
3 |
4 | .. Installation
5 |
6 | .. include:: installation.rst
7 |
8 | .. IO
9 |
10 | .. include:: IO.rst
11 |
12 | .. Automata Representation
13 |
14 | .. include:: automata_representation.rst
15 |
16 | .. Usage
17 |
18 | .. include:: usage.rst
19 |
20 | .. Testing
21 |
22 | Testing
23 | -------
24 |
25 | While the library does not handle exceptions or perform any
26 | conformance checking on its input, it has been extensively tested through
27 | `unit testing methodology `_,
28 | reducing the chance of unexpected behaviour.
29 | The Python `unittest — Unit testing framework `_
30 | has been used.
31 |
32 | Around 250 tests are provided and are listed in :doc:`unittest`.
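33 |
34 | A minimal sketch for running the whole suite programmatically with the standard
35 | ``unittest`` discovery mechanism, assuming it is launched from the project root
36 | where the ``tests`` package lives::
37 |
38 |     import unittest
39 |
40 |     # collect every test module under tests/ and run it verbosely
41 |     suite = unittest.defaultTestLoader.discover('tests')
42 |     unittest.TextTestRunner(verbosity=2).run(suite)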
--------------------------------------------------------------------------------
/doc/source/unittest.rst:
--------------------------------------------------------------------------------
1 | Unit testing
2 | ============
3 |
4 | .. toctree::
5 | :maxdepth: -1
6 |
7 | test_DFA
8 | test_NFA
9 | test_AFW
10 | test_automata_IO
11 |
12 | .. note::
13 |
14 |     **[EXPECTED FAILURE]** means that, in that specific situation, the
15 |     function is known and designed to fail (i.e. to raise an exception);
16 |     see the illustration after this note for how such cases are marked.
17 |
18 |     The situations covered by test cases marked as EXPECTED FAILURE should be avoided in practice.
19 |
20 |
21 |
22 |
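23 | For reference, such cases are typically marked with the standard
24 | ``unittest.expectedFailure`` decorator; the following is a generic
25 | illustration, not code taken from this test suite::
26 |
27 |     import unittest
28 |
29 |     class TestIllustration(unittest.TestCase):
30 |
31 |         @unittest.expectedFailure
32 |         def test_on_unsupported_input(self):
33 |             # the call under test is expected to raise here
34 |             raise ValueError("unsupported input")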
--------------------------------------------------------------------------------
/doc/source/usage.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | -----
3 |
4 | Just import the desired module and use the functions.
5 |
6 | Example::
7 |
8 | from PySimpleAutomata import DFA, automata_IO
9 |
10 | dfa_example = automata_IO.dfa_dot_importer('/PATH-IN/input.dot')
11 |
12 | DFA.dfa_completion(dfa_example)
13 | new_dfa=DFA.dfa_minimization(dfa_example)
14 |
15 | automata_IO.dfa_to_dot(new_dfa, 'output-name', '/PATH-OUT/')
16 |
17 | See :doc:`modules` for the complete API explanation.
18 |
19 | .. note::
20 |     By design, none of the functions returns a MINIMAL automaton, except
21 |     the one explicitly intended for minimization.
22 |
23 | .. warning::
24 |     States that share a name across different automata are not the same
25 |     entity, so avoid combining automata whose state names overlap.
26 |     When in doubt, use the `rename_X_states()` functions (where X is the type of automaton)
27 |     to rename all the states of an automaton, as in the sketch below.
28 |
29 |
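30 | A possible way to respect this when combining two automata is sketched below;
31 | the names ``DFA.rename_dfa_states`` and ``DFA.dfa_union`` and their exact
32 | signatures are assumptions here, so check :doc:`modules` for the actual API::
33 |
34 |     from PySimpleAutomata import DFA, automata_IO
35 |
36 |     dfa_1 = automata_IO.dfa_dot_importer('/PATH-IN/first.dot')
37 |     dfa_2 = automata_IO.dfa_dot_importer('/PATH-IN/second.dot')
38 |
39 |     # give the second automaton disjoint state names before combining
40 |     # (function name and suffix argument are assumed)
41 |     dfa_2 = DFA.rename_dfa_states(dfa_2, 'B_')
42 |
43 |     union_dfa = DFA.dfa_union(dfa_1, dfa_2)
44 |
45 |     automata_IO.dfa_to_dot(union_dfa, 'union-output', '/PATH-OUT/')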
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pydot==1.2.3
2 | graphviz==0.5.2
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | # from distutils.core import setup
4 |
5 | with open('README.rst') as f:
6 | readme = f.read()
7 |
8 | with open('LICENSE') as f:
9 | license = f.read()
10 |
11 | setup(
12 | name='PySimpleAutomata',
13 | version='0.5.0',
14 | author='Alessio Cecconi',
15 | author_email='alessio.cecconi.1991@gmail.com',
16 | url='https://github.com/Oneiroe/PySimpleAutomata',
17 | license=license,
18 | description='Python library to manage DFA, NFA and AFW automata',
19 | long_description=readme,
20 | packages=find_packages(exclude=['doc', 'tests']),
21 | install_requires=['graphviz', 'pydot'],
22 | setup_requires=['graphviz', 'pydot'],
23 | data_files=[("", ["LICENSE"])],
24 | classifiers=[
25 | # How mature is this project? Common values are
26 | # 3 - Alpha
27 | # 4 - Beta
28 | # 5 - Production/Stable
29 | 'Development Status :: 4 - Beta',
30 |
31 | # Indicate who your project is intended for
32 | 'Intended Audience :: Education',
33 | 'Topic :: Scientific/Engineering',
34 | 'Topic :: Software Development :: Libraries :: Python Modules',
35 |
36 | # Pick your license as you wish (should match "license" above)
37 | 'License :: OSI Approved :: MIT License',
38 |
39 |         # Specify the Python versions you support here; in particular, indicate whether you support Python 2, Python 3 or both.
40 | 'Programming Language :: Python :: 3 :: Only',
41 | 'Programming Language :: Python :: 3.5',
42 | 'Programming Language :: Python :: 3.6',
43 | ],
44 | keywords='automata DFA NFA AFW',
45 | )
46 |
47 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oneiroe/PySimpleAutomata/4b1e9818a553f8e5a30d40a1c681a00a12c9b072/tests/__init__.py
--------------------------------------------------------------------------------
/tests/context.py:
--------------------------------------------------------------------------------
1 | """ This is needed for the tests to keep consistent the references to the modules """
2 |
3 | import os
4 | import sys
5 |
6 | sys.path.insert(0, os.path.abspath('..'))
7 |
8 | import PySimpleAutomata
9 |
--------------------------------------------------------------------------------
/tests/dot/afw/nfa_afw_to_nfa_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 | fake -> s0 [style=bold]
5 |
6 | fake2 [style=invisible]
7 |
8 | fake2 -> s0 [style=bold]
9 |
10 | q0
11 | q1
12 | q2 [shape=doublecircle]
13 | s
14 |
15 | s0 -> s1 [label="a"]
16 | s0 -> s3 [label="b"]
17 | s1 -> s2 [label="b"]
18 | s1 -> s4 [label="a"]
19 | s2 -> s2 [label="b"]
20 | s2 -> s0 [label="b"]
21 | s3 -> s2 [label="a"]
22 | s3 -> s4 [label="b"]
23 | s4 -> s4 [label="a"]
24 | s4 -> s0 [label="b"]
25 | }
--------------------------------------------------------------------------------
/tests/dot/afw/nfa_nfa_to_afw_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_dfa_imported_intersection.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('c3', 't1')"
4 | "('s0', 't0')" [root=true]
5 | "('c3', 'c1')"
6 | "('c1', 'c1')"
7 | "('c2', 'c2')"
8 | "('c4', 't3')" [shape=doublecircle]
9 | "('c4', 'c3')"
10 | "('c2', 't1')"
11 | "('c4', 't2')"
12 | "('s0', 't3')"
13 | "('c1', 'c4')"
14 | "('c2', 'c3')"
15 | "('c4', 'c4')" [shape=doublecircle]
16 | "('c2', 'c4')"
17 | "('c1', 't1')"
18 | "('s1', 'c2')"
19 | "('c1', 'c2')"
20 | "('s1', 't1')"
21 | "('s1', 't2')"
22 | "('c3', 't3')"
23 | "('c4', 'c2')"
24 | "('c3', 't2')"
25 | "('c2', 't2')"
26 | "('c4', 't1')"
27 | "('s0', 't1')"
28 | "('s0', 'c3')"
29 | "('s0', 't2')"
30 | "('s1', 'c4')" [shape=doublecircle]
31 | "('c2', 't3')"
32 | "('c2', 't0')"
33 | "('c4', 't0')"
34 | "('s0', 'c2')"
35 | "('c3', 'c4')"
36 | "('c1', 't0')"
37 | "('s0', 'c4')"
38 | "('c1', 't3')"
39 | "('s0', 'c1')"
40 | "('c1', 'c3')"
41 | "('c3', 't0')"
42 | "('s1', 't0')"
43 | "('c3', 'c2')"
44 | "('c4', 'c1')"
45 | "('c2', 'c1')"
46 | "('c1', 't2')"
47 | "('s1', 'c3')"
48 | "('s1', 't3')" [shape=doublecircle]
49 | "('s1', 'c1')"
50 | "('c3', 'c3')"
51 | fake -> "('s0', 't0')" [style=bold]
52 | "('c2', 'c2')" -> "('c4', 'c4')" [label=gum]
53 | "('c2', 't0')" -> "('c3', 'c1')" [label="5c"]
54 | "('s0', 't1')" -> "('s1', 't3')" [label=gum]
55 | "('c2', 'c2')" -> "('c3', 'c3')" [label="5c"]
56 | "('c3', 't2')" -> "('c1', 't0')" [label=gum]
57 | "('s0', 't1')" -> "('c1', 't2')" [label="5c"]
58 | "('c2', 't1')" -> "('c4', 't3')" [label=gum]
59 | "('c3', 't1')" -> "('c1', 't3')" [label=gum]
60 | "('s0', 'c2')" -> "('s1', 'c4')" [label=gum]
61 | "('c2', 't1')" -> "('c3', 't2')" [label="5c"]
62 | "('c1', 'c1')" -> "('c2', 'c2')" [label="10c"]
63 | "('s0', 't0')" -> "('c1', 'c1')" [label="5c"]
64 | "('c1', 't0')" -> "('c2', 't1')" [label="10c"]
65 | "('s0', 'c2')" -> "('c1', 'c3')" [label="5c"]
66 | "('s0', 't2')" -> "('s1', 't0')" [label=gum]
67 | "('c3', 'c3')" -> "('c1', 'c1')" [label=gum]
68 | "('c2', 'c3')" -> "('c4', 'c1')" [label=gum]
69 | "('c2', 't2')" -> "('c4', 't0')" [label=gum]
70 | "('c3', 'c2')" -> "('c1', 'c4')" [label=gum]
71 | "('s0', 'c3')" -> "('s1', 'c1')" [label=gum]
72 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_dfa_importing_intersection.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('s0', 't3')"
4 | "('s0', 't1')"
5 | "('s1', 't1')"
6 | "('s3', 't3')"
7 | "('s0', 't0')" [root=true]
8 | "('s1', 't0')"
9 | "('s0', 't2')"
10 | "('s2', 't3')"
11 | "('s1', 't2')"
12 | "('s3', 't5')"
13 | "('s2', 't2')"
14 | "('s0', 't4')" [shape=doublecircle]
15 | "('s1', 't4')"
16 | "('s3', 't0')"
17 | "('s2', 't1')"
18 | "('s2', 't4')"
19 | "('s0', 't5')" [shape=doublecircle]
20 | "('s2', 't0')"
21 | "('s3', 't2')"
22 | "('s3', 't4')"
23 | "('s3', 't1')"
24 | "('s2', 't5')"
25 | "('s1', 't5')"
26 | "('s1', 't3')"
27 | fake -> "('s0', 't0')" [style=bold]
28 | "('s0', 't1')" -> "('s1', 't5')" [label="5c"]
29 | "('s2', 't1')" -> "('s3', 't5')" [label="5c"]
30 | "('s1', 't1')" -> "('s2', 't5')" [label="5c"]
31 | "('s3', 't5')" -> "('s0', 't0')" [label=gum]
32 | "('s1', 't2')" -> "('s2', 't3')" [label="5c"]
33 | "('s0', 't1')" -> "('s2', 't2')" [label="10c"]
34 | "('s1', 't1')" -> "('s3', 't2')" [label="10c"]
35 | "('s0', 't2')" -> "('s1', 't3')" [label="5c"]
36 | "('s3', 't3')" -> "('s0', 't1')" [label=gum]
37 | "('s2', 't0')" -> "('s3', 't1')" [label="5c"]
38 | "('s0', 't0')" -> "('s1', 't1')" [label="5c"]
39 | "('s2', 't1')" -> "('s3', 't2')" [label="10c"]
40 | "('s3', 't2')" -> "('s0', 't4')" [label=gum]
41 | "('s1', 't0')" -> "('s2', 't1')" [label="5c"]
42 | "('s2', 't2')" -> "('s3', 't3')" [label="5c"]
43 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_dfa_importing_no_state.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake -> "('s0', 't0')" [style=bold]
3 | "('s0', 't1')" -> "('s1', 't5')" [label="5c"]
4 | "('s2', 't1')" -> "('s3', 't5')" [label="5c"]
5 | "('s1', 't1')" -> "('s2', 't5')" [label="5c"]
6 | "('s3', 't5')" -> "('s0', 't0')" [label=gum]
7 | "('s1', 't2')" -> "('s2', 't3')" [label="5c"]
8 | "('s0', 't1')" -> "('s2', 't2')" [label="10c"]
9 | "('s1', 't1')" -> "('s3', 't2')" [label="10c"]
10 | "('s0', 't2')" -> "('s1', 't3')" [label="5c"]
11 | "('s3', 't3')" -> "('s0', 't1')" [label=gum]
12 | "('s2', 't0')" -> "('s3', 't1')" [label="5c"]
13 | "('s0', 't0')" -> "('s1', 't1')" [label="5c"]
14 | "('s2', 't1')" -> "('s3', 't2')" [label="10c"]
15 | "('s3', 't2')" -> "('s0', 't4')" [label=gum]
16 | "('s1', 't0')" -> "('s2', 't1')" [label="5c"]
17 | "('s2', 't2')" -> "('s3', 't3')" [label="5c"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_nfa_dot_importer_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | fake01 [style=invisible]
4 | s0 [root=true, shape=doublecircle]
5 | s3 [root=true]
6 | fake -> s0 [style=bold]
7 | fake01 -> s3 [style=bold]
8 |
9 | s1
10 | s2
11 |
12 |
13 | s0 -> s1 [label="5c"]
14 | s0 -> s2 [label="5c"]
15 | s0 -> s2 [label="10c"]
16 | s1 -> s2 [label="5c"]
17 | s1 -> s3 [label="5c"]
18 | s1 -> s3 [label="10c"]
19 | s2 -> s3 [label="5c"]
20 | s2 -> s3 [label="10c"]
21 | s3 -> s0 [label="gum"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_nfa_imported_intersection.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('c3', 't1')"
4 | "('s0', 't0')" [root=true]
5 | "('c3', 'c1')"
6 | "('c1', 'c1')"
7 | "('c2', 'c2')"
8 | "('c4', 't3')" [shape=doublecircle]
9 | "('c4', 'c3')"
10 | "('c2', 't1')"
11 | "('c4', 't2')"
12 | "('s0', 't3')"
13 | "('c1', 'c4')"
14 | "('c2', 'c3')"
15 | "('c4', 'c4')" [shape=doublecircle]
16 | "('c2', 'c4')"
17 | "('c1', 't1')"
18 | "('s1', 'c2')"
19 | "('c1', 'c2')"
20 | "('s1', 't1')"
21 | "('s1', 't2')"
22 | "('c3', 't3')"
23 | "('c4', 'c2')"
24 | "('c3', 't2')"
25 | "('c2', 't2')"
26 | "('c4', 't1')"
27 | "('s0', 't1')"
28 | "('s0', 'c3')"
29 | "('s0', 't2')"
30 | "('s1', 'c4')" [shape=doublecircle]
31 | "('c2', 't3')"
32 | "('c2', 't0')"
33 | "('c4', 't0')"
34 | "('s0', 'c2')"
35 | "('c3', 'c4')"
36 | "('c1', 't0')"
37 | "('s0', 'c4')"
38 | "('c1', 't3')"
39 | "('s0', 'c1')"
40 | "('c1', 'c3')"
41 | "('c3', 't0')"
42 | "('s1', 't0')"
43 | "('c3', 'c2')"
44 | "('c4', 'c1')"
45 | "('c2', 'c1')"
46 | "('c1', 't2')"
47 | "('s1', 'c3')"
48 | "('s1', 't3')" [shape=doublecircle]
49 | "('s1', 'c1')"
50 | "('c3', 'c3')"
51 | fake -> "('s0', 't0')" [style=bold]
52 | "('c2', 'c2')" -> "('c4', 'c4')" [label=gum]
53 | "('c2', 't0')" -> "('c3', 'c1')" [label="5c"]
54 | "('s0', 't1')" -> "('s1', 't3')" [label=gum]
55 | "('c2', 'c2')" -> "('c3', 'c3')" [label="5c"]
56 | "('c3', 't2')" -> "('c1', 't0')" [label=gum]
57 | "('s0', 't1')" -> "('c1', 't2')" [label="5c"]
58 | "('c2', 't1')" -> "('c4', 't3')" [label=gum]
59 | "('c3', 't1')" -> "('c1', 't3')" [label=gum]
60 | "('s0', 'c2')" -> "('s1', 'c4')" [label=gum]
61 | "('c2', 't1')" -> "('c3', 't2')" [label="5c"]
62 | "('c1', 'c1')" -> "('c2', 'c2')" [label="10c"]
63 | "('s0', 't0')" -> "('c1', 'c1')" [label="5c"]
64 | "('c1', 't0')" -> "('c2', 't1')" [label="10c"]
65 | "('s0', 'c2')" -> "('c1', 'c3')" [label="5c"]
66 | "('s0', 't2')" -> "('s1', 't0')" [label=gum]
67 | "('c3', 'c3')" -> "('c1', 'c1')" [label=gum]
68 | "('c2', 'c3')" -> "('c4', 'c1')" [label=gum]
69 | "('c2', 't2')" -> "('c4', 't0')" [label=gum]
70 | "('c3', 'c2')" -> "('c1', 'c4')" [label=gum]
71 | "('s0', 'c3')" -> "('s1', 'c1')" [label=gum]
72 | }
--------------------------------------------------------------------------------
/tests/dot/automata_io/automata_io_nfa_importer_pydot_nfa_simple.dot:
--------------------------------------------------------------------------------
1 | digraph G {
2 | graph [bb="0,0,199.55,379.7"];
3 | node [label="\N"];
4 | fake0 [height=0.5,
5 | pos="159.55,361.7",
6 | style=invisible,
7 | width=0.84854];
8 | s3 [height=0.5,
9 | pos="159.55,288.7",
10 | root=True,
11 | width=0.75];
12 | fake0 -> s3 [pos="e,159.55,306.73 159.55,343.51 159.55,335.49 159.55,325.74 159.55,316.77",
13 | style=bold];
14 | fake1 [height=0.5,
15 | pos="30.547,288.7",
16 | style=invisible,
17 | width=0.84854];
18 | s0 [height=0.63468,
19 | pos="42.547,196.85",
20 | root=True,
21 | shape=doublecircle,
22 | width=0.63468];
23 | fake1 -> s0 [pos="e,39.646,219.57 32.861,270.37 34.402,258.83 36.474,243.32 38.305,229.61",
24 | style=bold];
25 | s2 [height=0.5,
26 | pos="107.55,18",
27 | width=0.75];
28 | s0 -> s2 [label="5c",
29 | lp="14.047,105",
30 | pos="e,83.031,25.727 29.678,177.46 16.21,155.89 -1.7765,119.02 7.5473,87 12.408,70.309 15.37,65.339 28.547,54 41.513,42.843 58.601,34.682 \
31 | 73.567,29.06"];
32 | s0 -> s2 [label="10c",
33 | lp="61.547,105",
34 | pos="e,91.66,32.886 40.601,173.69 39.463,151.05 40.062,115.29 51.547,87 58.79,69.164 72.449,52.427 84.464,40.021"];
35 | s1 [height=0.5,
36 | pos="107.55,105",
37 | width=0.75];
38 | s0 -> s1 [label="5c",
39 | lp="83.047,148.5",
40 | pos="e,93.693,120.52 53.425,176.54 59.75,165.79 68.118,152.32 76.547,141 79.737,136.71 83.336,132.32 86.922,128.15"];
41 | s2 -> s3 [label="5c",
42 | lp="167.05,148.5",
43 | pos="e,161.3,270.63 117.84,34.817 126.07,48.188 137.23,68.127 143.55,87 164.99,151.01 159.9,170.24 162.55,237.7 162.81,244.36 162.83,\
44 | 246.04 162.55,252.7 162.44,255.26 162.28,257.92 162.1,260.58"];
45 | s2 -> s3 [label="10c",
46 | lp="189.55,148.5",
47 | pos="e,169.66,271.94 125.33,31.965 132.84,38.057 141.28,45.783 147.55,54 172.36,86.518 172.37,100.43 177.55,141 183.83,190.24 189.23,\
48 | 204.45 177.55,252.7 176.72,256.12 175.49,259.59 174.06,262.94"];
49 | s3 -> s0 [label=gum,
50 | lp="77.047,245.2",
51 | pos="e,46.786,219.49 133.05,283.95 112.04,279.69 83.202,270.83 64.547,252.7 57.919,246.25 53.231,237.55 49.939,228.98"];
52 | s1 -> s2 [label="5c",
53 | lp="114.05,61.5",
54 | pos="e,107.55,36.175 107.55,86.799 107.55,75.163 107.55,59.548 107.55,46.237"];
55 | s1 -> s3 [label="5c",
56 | lp="147.05,196.85",
57 | pos="e,154.84,270.73 112.53,122.75 116.61,136.51 122.51,156.5 127.55,174 136.1,203.73 145.69,237.92 152.12,260.97"];
58 | s1 -> s3 [label="10c",
59 | lp="113.55,196.85",
60 | pos="e,143.48,274.23 103.18,122.78 98.101,145.51 91.808,186.98 103.55,219.7 110.09,237.94 123.87,254.78 136.13,267.14"];
61 | }
62 |
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 | s5
12 | s6
13 |
14 | s0 -> s1 [label="5c"]
15 | s0 -> s4 [label="10c"]
16 | s1 -> s2 [label="5c"]
17 | s1 -> s3 [label="10c"]
18 | s2 -> s3 [label="5c"]
19 | s2 -> s3 [label="10c"]
20 | s3 -> s0 [label="gum"]
21 | s4 -> s5 [label="5c"]
22 | s4 -> s6 [label="10c"]
23 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_02_co_reachable.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 |
12 | s0 -> s1 [label="5c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_04.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_05.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4 [shape=doublecircle]
11 |
12 | s1 -> s2 [label="5c"]
13 | s1 -> s3 [label="10c"]
14 | s2 -> s3 [label="5c"]
15 | s2 -> s3 [label="10c"]
16 | s3 -> s4 [label="5c"]
17 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_co_reachable_test_06.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('c3', 't1')"
4 | "('s0', 't0')" [root=true]
5 | "('c3', 'c1')"
6 | "('c1', 'c1')"
7 | "('c2', 'c2')"
8 | "('c4', 't3')" [shape=doublecircle]
9 | "('c4', 'c3')"
10 | "('c2', 't1')"
11 | "('c4', 't2')"
12 | "('s0', 't3')"
13 | "('c1', 'c4')"
14 | "('c2', 'c3')"
15 | "('c4', 'c4')" [shape=doublecircle]
16 | "('c2', 'c4')"
17 | "('c1', 't1')"
18 | "('s1', 'c2')"
19 | "('c1', 'c2')"
20 | "('s1', 't1')"
21 | "('s1', 't2')"
22 | "('c3', 't3')"
23 | "('c4', 'c2')"
24 | "('c3', 't2')"
25 | "('c2', 't2')"
26 | "('c4', 't1')"
27 | "('s0', 't1')"
28 | "('s0', 'c3')"
29 | "('s0', 't2')"
30 | "('s1', 'c4')" [shape=doublecircle]
31 | "('c2', 't3')"
32 | "('c2', 't0')"
33 | "('c4', 't0')"
34 | "('s0', 'c2')"
35 | "('c3', 'c4')"
36 | "('c1', 't0')"
37 | "('s0', 'c4')"
38 | "('c1', 't3')"
39 | "('s0', 'c1')"
40 | "('c1', 'c3')"
41 | "('c3', 't0')"
42 | "('s1', 't0')"
43 | "('c3', 'c2')"
44 | "('c4', 'c1')"
45 | "('c2', 'c1')"
46 | "('c1', 't2')"
47 | "('s1', 'c3')"
48 | "('s1', 't3')" [shape=doublecircle]
49 | "('s1', 'c1')"
50 | "('c3', 'c3')"
51 | fake -> "('s0', 't0')" [style=bold]
52 | "('c2', 'c2')" -> "('c4', 'c4')" [label=gum]
53 | "('c2', 't0')" -> "('c3', 'c1')" [label="5c"]
54 | "('s0', 't1')" -> "('s1', 't3')" [label=gum]
55 | "('c2', 'c2')" -> "('c3', 'c3')" [label="5c"]
56 | "('c3', 't2')" -> "('c1', 't0')" [label=gum]
57 | "('s0', 't1')" -> "('c1', 't2')" [label="5c"]
58 | "('c2', 't1')" -> "('c4', 't3')" [label=gum]
59 | "('c3', 't1')" -> "('c1', 't3')" [label=gum]
60 | "('s0', 'c2')" -> "('s1', 'c4')" [label=gum]
61 | "('c2', 't1')" -> "('c3', 't2')" [label="5c"]
62 | "('c1', 'c1')" -> "('c2', 'c2')" [label="10c"]
63 | "('s0', 't0')" -> "('c1', 'c1')" [label="5c"]
64 | "('c1', 't0')" -> "('c2', 't1')" [label="10c"]
65 | "('s0', 'c2')" -> "('c1', 'c3')" [label="5c"]
66 | "('s0', 't2')" -> "('s1', 't0')" [label=gum]
67 | "('c3', 'c3')" -> "('c1', 'c1')" [label=gum]
68 | "('c2', 'c3')" -> "('c4', 'c1')" [label=gum]
69 | "('c2', 't2')" -> "('c4', 't0')" [label=gum]
70 | "('c3', 'c2')" -> "('c1', 'c4')" [label=gum]
71 | "('s0', 'c3')" -> "('s1', 'c1')" [label=gum]
72 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_01_complemented.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1 [shape=doublecircle]
8 | s2 [shape=doublecircle]
9 | s3 [shape=doublecircle]
10 | sink [shape=doublecircle]
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s2 [label="10c"]
14 | s0 -> sink [label="gum"]
15 | s1 -> s2 [label="5c"]
16 | s1 -> s3 [label="10c"]
17 | s1 -> sink [label="gum"]
18 | s2 -> s3 [label="5c"]
19 | s2 -> s3 [label="10c"]
20 | s2 -> sink [label="gum"]
21 | s3 -> s0 [label="gum"]
22 | s3 -> sink [label="10c"]
23 | s3 -> sink [label="5c"]
24 | sink -> sink [label="10c"]
25 | sink -> sink [label="5c"]
26 | sink -> sink [label="gum"]
27 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_02_complemented.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | sink [shape=doublecircle]
3 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_complementation_test_03_complemented.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1 [shape=doublecircle]
8 | s2 [shape=doublecircle]
9 | s3 [shape=doublecircle]
10 | sink [shape=doublecircle]
11 |
12 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_01_completed.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | sink
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s2 [label="10c"]
14 | s0 -> sink [label="gum"]
15 | s1 -> s2 [label="5c"]
16 | s1 -> s3 [label="10c"]
17 | s1 -> sink [label="gum"]
18 | s2 -> s3 [label="5c"]
19 | s2 -> s3 [label="10c"]
20 | s2 -> sink [label="gum"]
21 | s3 -> s0 [label="gum"]
22 | s3 -> sink [label="10c"]
23 | s3 -> sink [label="5c"]
24 | sink -> sink [label="10c"]
25 | sink -> sink [label="5c"]
26 | sink -> sink [label="gum"]
27 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_02_completed.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | sink
3 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_completion_test_03_completed.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | sink
11 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_intersection_1_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_intersection_1_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | c1
8 | c2
9 | c3
10 | c4 [shape=doublecircle]
11 | s1 [shape=doublecircle]
12 |
13 | s0 -> c1 [label="5c"]
14 | s0 -> s1 [label="gum"]
15 | c1 -> c2 [label="10c"]
16 | c2 -> c3 [label="5c"]
17 | c2 -> c4 [label="gum"]
18 | c3 -> c1 [label="gum"]
19 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_intersection_2_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | t1
8 | t2
9 | t3
10 | t4 [shape=doublecircle]
11 | t5 [shape=doublecircle]
12 |
13 | t0 -> t1 [label="5c"]
14 | t1 -> t2 [label="10c"]
15 | t1 -> t5 [label="5c"]
16 | t2 -> t3 [label="5c"]
17 | t2 -> t4 [label="gum"]
18 | t3 -> t1 [label="gum"]
19 | t5 -> t0 [label="gum"]
20 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_intersection_2_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | c1
8 | c2
9 | c3
10 | c4 [shape=doublecircle]
11 | t1
12 | t2
13 | t3 [shape=doublecircle]
14 |
15 | t0 -> c1 [label="5c"]
16 | t0 -> t1 [label="10c"]
17 | c1 -> c2 [label="10c"]
18 | c2 -> c3 [label="5c"]
19 | c2 -> c4 [label="gum"]
20 | c3 -> c1 [label="gum"]
21 | t1 -> t3 [label="gum"]
22 | t1 -> t2 [label="5c"]
23 | t2 -> t0 [label="gum"]
24 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_minimization_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_minimization_test_01_s4.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s4
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s4 [label="10c"]
13 | s1 -> s4 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s4 -> s3 [label="5c"]
16 | s4 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_minimization_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s4 [label="10c"]
14 | s1 -> s2 [label="5c"]
15 | s1 -> s3 [label="10c"]
16 | s2 -> s3 [label="5c"]
17 | s2 -> s3 [label="10c"]
18 | s3 -> s0 [label="gum"]
19 | s4 -> s3 [label="5c"]
20 | s4 -> s3 [label="10c"]
21 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_minimization_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_minimization_test_04.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_nonemptiness_check_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_nonemptiness_check_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_projection_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_projection_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s2 [label="10c"]
14 | s1 -> s2 [label="5c"]
15 | s1 -> s3 [label="10c"]
16 | s2 -> s3 [label="5c"]
17 | s2 -> s3 [label="10c"]
18 | s3 -> s0 [label="gum"]
19 | s3 -> s4 [label="5c"]
20 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s4 [label="10c"]
14 | s1 -> s2 [label="5c"]
15 | s1 -> s3 [label="10c"]
16 | s2 -> s3 [label="5c"]
17 | s2 -> s3 [label="10c"]
18 | s3 -> s0 [label="gum"]
19 | s4 -> s3 [label="5c"]
20 | s4 -> s3 [label="10c"]
21 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_02_reachable.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('c3', 'c3')"
4 | "('c2', 'c2')"
5 | "('c4', 'c4')" [shape=doublecircle]
6 | "('s0', 't0')" [root=true]
7 | "('c1', 'c1')"
8 | "('c3', 'c3')" -> "('c1', 'c1')" [label=gum]
9 | "('c2', 'c2')" -> "('c4', 'c4')" [label=gum]
10 | "('s0', 't0')" -> "('c1', 'c1')" [label="5c"]
11 | "('c2', 'c2')" -> "('c3', 'c3')" [label="5c"]
12 | "('c1', 'c1')" -> "('c2', 'c2')" [label="10c"]
13 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_04.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_reachable_test_05.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4 [shape=doublecircle]
11 |
12 | s0 -> s1 [label="5c"]
13 | s0 -> s2 [label="10c"]
14 | s1 -> s2 [label="5c"]
15 | s1 -> s3 [label="10c"]
16 | s2 -> s3 [label="5c"]
17 | s2 -> s3 [label="10c"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_renaming_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_run_acceptance_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_run_acceptance_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3 [shape=doublecircle]
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_run_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_run_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3 [shape=doublecircle]
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_trimming_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | fake [style=invisible]
3 | "('c3', 't1')"
4 | "('s0', 't0')" [root=true]
5 | "('c3', 'c1')"
6 | "('c1', 'c1')"
7 | "('c2', 'c2')"
8 | "('c4', 't3')" [shape=doublecircle]
9 | "('c4', 'c3')"
10 | "('c2', 't1')"
11 | "('c4', 't2')"
12 | "('s0', 't3')"
13 | "('c1', 'c4')"
14 | "('c2', 'c3')"
15 | "('c4', 'c4')" [shape=doublecircle]
16 | "('c2', 'c4')"
17 | "('c1', 't1')"
18 | "('s1', 'c2')"
19 | "('c1', 'c2')"
20 | "('s1', 't1')"
21 | "('s1', 't2')"
22 | "('c3', 't3')"
23 | "('c4', 'c2')"
24 | "('c3', 't2')"
25 | "('c2', 't2')"
26 | "('c4', 't1')"
27 | "('s0', 't1')"
28 | "('s0', 'c3')"
29 | "('s0', 't2')"
30 | "('s1', 'c4')" [shape=doublecircle]
31 | "('c2', 't3')"
32 | "('c2', 't0')"
33 | "('c4', 't0')"
34 | "('s0', 'c2')"
35 | "('c3', 'c4')"
36 | "('c1', 't0')"
37 | "('s0', 'c4')"
38 | "('c1', 't3')"
39 | "('s0', 'c1')"
40 | "('c1', 'c3')"
41 | "('c3', 't0')"
42 | "('s1', 't0')"
43 | "('c3', 'c2')"
44 | "('c4', 'c1')"
45 | "('c2', 'c1')"
46 | "('c1', 't2')"
47 | "('s1', 'c3')"
48 | "('s1', 't3')" [shape=doublecircle]
49 | "('s1', 'c1')"
50 | "('c3', 'c3')"
51 | "('c9', 'c9')"
52 | "('c19', 'c19')"
53 | fake -> "('s0', 't0')" [style=bold]
54 | "('c2', 'c2')" -> "('c4', 'c4')" [label=gum]
55 | "('c2', 't0')" -> "('c3', 'c1')" [label="5c"]
56 | "('s0', 't1')" -> "('s1', 't3')" [label=gum]
57 | "('c2', 'c2')" -> "('c3', 'c3')" [label="5c"]
58 | "('c3', 't2')" -> "('c1', 't0')" [label=gum]
59 | "('s0', 't1')" -> "('c1', 't2')" [label="5c"]
60 | "('c2', 't1')" -> "('c4', 't3')" [label=gum]
61 | "('c3', 't1')" -> "('c1', 't3')" [label=gum]
62 | "('s0', 'c2')" -> "('s1', 'c4')" [label=gum]
63 | "('c2', 't1')" -> "('c3', 't2')" [label="5c"]
64 | "('c1', 'c1')" -> "('c2', 'c2')" [label="10c"]
65 | "('s0', 't0')" -> "('c1', 'c1')" [label="5c"]
66 | "('c1', 't0')" -> "('c2', 't1')" [label="10c"]
67 | "('s0', 'c2')" -> "('c1', 'c3')" [label="5c"]
68 | "('s0', 't2')" -> "('s1', 't0')" [label=gum]
69 | "('c3', 'c3')" -> "('c1', 'c1')" [label=gum]
70 | "('c2', 'c3')" -> "('c4', 'c1')" [label=gum]
71 | "('c2', 't2')" -> "('c4', 't0')" [label=gum]
72 | "('c3', 'c2')" -> "('c1', 'c4')" [label=gum]
73 | "('s0', 'c3')" -> "('s1', 'c1')" [label=gum]
74 | "('c3', 'c3')" -> "('c9', 'c9')" [label="10c"]
75 | "('c19', 'c19')" -> "('s0', 't0')" [label="10c"]
76 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_trimming_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_trimming_test_03.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 | s4
11 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_trimming_test_04.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_union_1_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_union_1_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | c1
8 | c2
9 | c3
10 | c4 [shape=doublecircle]
11 | s1 [shape=doublecircle]
12 |
13 | s0 -> c1 [label="5c"]
14 | s0 -> s1 [label="gum"]
15 | c1 -> c2 [label="10c"]
16 | c2 -> c3 [label="5c"]
17 | c2 -> c4 [label="gum"]
18 | c3 -> c1 [label="gum"]
19 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_union_2_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | t1
8 | t2
9 | t3
10 | t4 [shape=doublecircle]
11 | t5 [shape=doublecircle]
12 |
13 | t0 -> t1 [label="5c"]
14 | t1 -> t2 [label="10c"]
15 | t1 -> t5 [label="5c"]
16 | t2 -> t3 [label="5c"]
17 | t2 -> t4 [label="gum"]
18 | t3 -> t1 [label="gum"]
19 | t5 -> t0 [label="gum"]
20 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_union_2_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | c1
8 | c2
9 | c3
10 | c4 [shape=doublecircle]
11 | t1
12 | t2
13 | t3 [shape=doublecircle]
14 |
15 | t0 -> c1 [label="5c"]
16 | t0 -> t1 [label="10c"]
17 | c1 -> c2 [label="10c"]
18 | c2 -> c3 [label="5c"]
19 | c2 -> c4 [label="gum"]
20 | c3 -> c1 [label="gum"]
21 | t1 -> t3 [label="gum"]
22 | t1 -> t2 [label="5c"]
23 | t2 -> t0 [label="gum"]
24 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_word_acceptance_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true, shape=doublecircle]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/dfa/dfa_word_acceptance_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2
9 | s3 [shape=doublecircle]
10 |
11 | s0 -> s1 [label="5c"]
12 | s0 -> s2 [label="10c"]
13 | s1 -> s2 [label="5c"]
14 | s1 -> s3 [label="10c"]
15 | s2 -> s3 [label="5c"]
16 | s2 -> s3 [label="10c"]
17 | s3 -> s0 [label="gum"]
18 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_complementation_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_determinization_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_determinization_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true, shape=doublecircle]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | t1
8 | t2
9 | t3
10 | t4 [shape=doublecircle]
11 |
12 |
13 | t0 -> t1 [label="b"]
14 | t0 -> t2 [label="a"]
15 | t1 -> t2 [label="c"]
16 | t1 -> t3 [label="c"]
17 | t1 -> t4 [label="b"]
18 | t2 -> t4 [label="a"]
19 | t2 -> t2 [label="a"]
20 | t2 -> t1 [label="b"]
21 | t3 -> t3 [label="b"]
22 | t3 -> t1 [label="a"]
23 | t3 -> t4 [label="a"]
24 | t3 -> t0 [label="b"]
25 | t3 -> t0 [label="c"]
26 | t4 -> t0 [label="c"]
27 | t4 -> t0 [label="b"]
28 | t4 -> t4 [label="a"]
29 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_interestingness_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_interestingness_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1 [shape=doublecircle]
8 | s2
9 | s3 [shape=doublecircle]
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s1 [label="a"]
16 | s1 -> s1 [label="b"]
17 | s1 -> s4 [label="a"]
18 | s2 -> s2 [label="b"]
19 | s2 -> s2 [label="a"]
20 | s2 -> s0 [label="b"]
21 | s3 -> s2 [label="a"]
22 | s3 -> s3 [label="b"]
23 | s3 -> s3 [label="a"]
24 | s3 -> s4 [label="b"]
25 | s4 -> s4 [label="a"]
26 | s4 -> s0 [label="b"]
27 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_intersection_1_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_intersection_2_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true, shape=doublecircle]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | t1
8 | t2
9 | t3
10 | t4 [shape=doublecircle]
11 |
12 |
13 | t0 -> t1 [label="b"]
14 | t0 -> t2 [label="a"]
15 | t1 -> t2 [label="c"]
16 | t1 -> t3 [label="c"]
17 | t1 -> t4 [label="b"]
18 | t2 -> t4 [label="a"]
19 | t2 -> t2 [label="a"]
20 | t2 -> t1 [label="b"]
21 | t3 -> t3 [label="b"]
22 | t3 -> t1 [label="a"]
23 | t3 -> t4 [label="a"]
24 | t3 -> t0 [label="b"]
25 | t3 -> t0 [label="c"]
26 | t4 -> t0 [label="c"]
27 | t4 -> t0 [label="b"]
28 | t4 -> t4 [label="a"]
29 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_nonemptiness_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_nonemptiness_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 |
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s4 [label="b"]
19 | s4 -> s4 [label="a"]
20 | s4 -> s0 [label="b"]
21 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_nonuniversality_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_nonuniversality_test_02.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1 [shape=doublecircle]
8 | s2
9 | s3 [shape=doublecircle]
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s1 [label="a"]
16 | s1 -> s1 [label="b"]
17 | s1 -> s4 [label="a"]
18 | s2 -> s2 [label="b"]
19 | s2 -> s2 [label="a"]
20 | s2 -> s0 [label="b"]
21 | s3 -> s2 [label="a"]
22 | s3 -> s3 [label="b"]
23 | s3 -> s3 [label="a"]
24 | s3 -> s4 [label="b"]
25 | s4 -> s4 [label="a"]
26 | s4 -> s0 [label="b"]
27 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_renaming_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_run_acceptance_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_union_1_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_union_2_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | t0 [root=true, shape=doublecircle]
4 |
5 | fake -> t0 [style=bold]
6 |
7 | t1
8 | t2
9 | t3
10 | t4 [shape=doublecircle]
11 |
12 |
13 | t0 -> t1 [label="b"]
14 | t0 -> t2 [label="a"]
15 | t1 -> t2 [label="c"]
16 | t1 -> t3 [label="c"]
17 | t1 -> t4 [label="b"]
18 | t2 -> t4 [label="a"]
19 | t2 -> t2 [label="a"]
20 | t2 -> t1 [label="b"]
21 | t3 -> t3 [label="b"]
22 | t3 -> t1 [label="a"]
23 | t3 -> t4 [label="a"]
24 | t3 -> t0 [label="b"]
25 | t3 -> t0 [label="c"]
26 | t4 -> t0 [label="c"]
27 | t4 -> t0 [label="b"]
28 | t4 -> t4 [label="a"]
29 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_union_3_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | c0 [root=true]
4 |
5 | fake -> c0 [style=bold]
6 |
7 |
8 | t1
9 | t2
10 | t3
11 | c4 [shape=doublecircle]
12 | c1
13 | c2
14 | c3
15 |
16 | c0 -> t2 [label="a"]
17 | c0 -> t1 [label="a"]
18 | c0 -> c1 [label="b"]
19 | t1 -> t2 [label="c"]
20 | t1 -> t3 [label="c"]
21 | t2 -> t2 [label="a"]
22 | t2 -> t1 [label="b"]
23 | t2 -> c2 [label="b"]
24 | t3 -> t3 [label="b"]
25 | t3 -> t1 [label="a"]
26 | t3 -> c4 [label="c"]
27 | c2 -> c3 [label="c"]
28 | c1 -> c3 [label="c"]
29 | c3 -> c4 [label="b"]
30 | c3 -> c4 [label="a"]
31 | }
--------------------------------------------------------------------------------
/tests/dot/nfa/nfa_word_acceptance_test_01.dot:
--------------------------------------------------------------------------------
1 | digraph{
2 | fake [style=invisible]
3 | s0 [root=true]
4 |
5 | fake -> s0 [style=bold]
6 |
7 | s1
8 | s2 [shape=doublecircle]
9 | s3
10 | s4
11 |
12 | s0 -> s1 [label="a"]
13 | s0 -> s3 [label="b"]
14 | s1 -> s2 [label="b"]
15 | s1 -> s4 [label="a"]
16 | s2 -> s2 [label="b"]
17 | s2 -> s0 [label="b"]
18 | s3 -> s2 [label="a"]
19 | s3 -> s4 [label="b"]
20 | s4 -> s4 [label="a"]
21 | s4 -> s0 [label="b"]
22 | }
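All of the NFA .dot fixtures above share one convention: an invisible "fake" node with a bold edge into every node tagged root=true marks the initial state(s), shape=doublecircle marks accepting states, and the edge label attributes carry the input symbols. A minimal, hedged sketch of reading one of these fixtures with the importer exercised later in tests/test_automata_IO.py (the path is the fixture just above; the printed values assume root=true and doublecircle are what the importer treats as initial and accepting):

    # Sketch only, not part of the repository sources.
    from PySimpleAutomata import automata_IO

    nfa = automata_IO.nfa_dot_importer(
        './tests/dot/nfa/nfa_word_acceptance_test_01.dot')
    # The importer returns a plain dict with set-valued fields, matching the
    # expected values used in the importer tests:
    #   'alphabet', 'states', 'initial_states', 'accepting_states' (sets) and
    #   'transitions' mapping (state, symbol) -> set of destination states.
    print(nfa['initial_states'])    # expected {'s0'} for this fixture
    print(nfa['accepting_states'])  # expected {'s2'} for this fixture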
--------------------------------------------------------------------------------
/tests/json/afw/afw_afw_to_nfa_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_complementation_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_completion_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_intersection_1_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_intersection_2_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "s_root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "s_root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "s0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "s0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "s0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "s0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_intersection_3_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "q0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "s_root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "s_root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "q0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "q0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "q0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "q0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_nfa_to_afw_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "root",
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "s0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "s0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "s0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "s0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_nonemptiness_check_test_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_nonemptiness_check_test_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "s_root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "s_root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "s0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "s0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2 and s1"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "s0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2 and s3"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "s0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_nonuniversality_check_test_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_nonuniversality_check_test_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s_root",
17 | "s0",
18 | "s1",
19 | "s3",
20 | "s4"
21 | ],
22 | "transitions": [
23 | [
24 | "s_root",
25 | "b",
26 | "s3"
27 | ],
28 | [
29 | "s1",
30 | "a",
31 | "s4"
32 | ],
33 | [
34 | "s_root",
35 | "b",
36 | "s3"
37 | ],
38 | [
39 | "s0",
40 | "a",
41 | "s1"
42 | ],
43 | [
44 | "s4",
45 | "b",
46 | "s0"
47 | ],
48 | [
49 | "s_root",
50 | "a",
51 | "s1"
52 | ],
53 | [
54 | "s2",
55 | "b",
56 | "s0 and s2"
57 | ],
58 | [
59 | "s1",
60 | "b",
61 | "s2 or s1"
62 | ],
63 | [
64 | "s2",
65 | "a",
66 | "True"
67 | ],
68 | [
69 | "s3",
70 | "a",
71 | "s2 or s3"
72 | ],
73 | [
74 | "s4",
75 | "a",
76 | "s4"
77 | ],
78 | [
79 | "s0",
80 | "b",
81 | "s3"
82 | ],
83 | [
84 | "s3",
85 | "b",
86 | "s4"
87 | ]
88 | ]
89 | }
90 |
91 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_union_1_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_union_2_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "s_root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "s_root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "s0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "s0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "s0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "s0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_union_3_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s_root",
8 | "q0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s_root",
15 | "accepting_states": [
16 | "s2"
17 | ],
18 | "transitions": [
19 | [
20 | "s_root",
21 | "b",
22 | "s3"
23 | ],
24 | [
25 | "s_root",
26 | "a",
27 | "s1"
28 | ],
29 | [
30 | "q0",
31 | "b",
32 | "s3"
33 | ],
34 | [
35 | "q0",
36 | "a",
37 | "s1"
38 | ],
39 | [
40 | "s1",
41 | "a",
42 | "s4"
43 | ],
44 | [
45 | "s1",
46 | "b",
47 | "s2"
48 | ],
49 | [
50 | "s2",
51 | "b",
52 | "q0 or s2"
53 | ],
54 | [
55 | "s3",
56 | "a",
57 | "s2"
58 | ],
59 | [
60 | "s3",
61 | "b",
62 | "s4"
63 | ],
64 | [
65 | "s4",
66 | "a",
67 | "s4"
68 | ],
69 | [
70 | "s4",
71 | "b",
72 | "q0"
73 | ]
74 | ]
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/tests/json/afw/afw_word_acceptance_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | [
19 | "q0",
20 | "b",
21 | "q0 or q2"
22 | ],
23 | [
24 | "q0",
25 | "a",
26 | "q1"
27 | ],
28 | [
29 | "q1",
30 | "a",
31 | "q0"
32 | ],
33 | [
34 | "q1",
35 | "b",
36 | "q1 or q2"
37 | ],
38 | [
39 | "q2",
40 | "a",
41 | "q2"
42 | ],
43 | [
44 | "s",
45 | "b",
46 | "s and q0"
47 | ],
48 | [
49 | "s",
50 | "a",
51 | "s"
52 | ]
53 | ]
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/tests/json/automata_io/afw_json_importer_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0",
15 | "s"
16 | ],
17 | "transitions": [
18 | ["q0", "b", "q0 or q2"],
19 | ["q0", "a", "q1"],
20 | ["q1", "a", "q0"],
21 | ["q1", "b", "q1 or q2"],
22 | ["q2", "a", "q2"],
23 | ["s", "b", "s and q0"],
24 | ["s", "a", "s"]
25 | ]
26 | }
--------------------------------------------------------------------------------
/tests/json/automata_io/automata_io_afw_json_importer_test_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b"
5 | ],
6 | "states": [
7 | "s",
8 | "q0",
9 | "q1",
10 | "q2"
11 | ],
12 | "initial_state": "s",
13 | "accepting_states": [
14 | "q0"
15 | ],
16 | "transitions": [
17 | [
18 | "q0",
19 | "b",
20 | "q0 or q2"
21 | ],
22 | [
23 | "q0",
24 | "a",
25 | "q1"
26 | ],
27 | [
28 | "q1",
29 | "a",
30 | "q0"
31 | ],
32 | [
33 | "q1",
34 | "b",
35 | "q1 or q2"
36 | ],
37 | [
38 | "q2",
39 | "a",
40 | "q2"
41 | ],
42 | [
43 | "s",
44 | "b",
45 | "s and q0"
46 | ],
47 | [
48 | "s",
49 | "a",
50 | "s"
51 | ]
52 | ]
53 | }
54 |
55 |
--------------------------------------------------------------------------------
/tests/json/dfa/dfa_export_to_json_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "5c",
4 | "10c",
5 | "gum"
6 | ],
7 | "states": [
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s0",
15 | "accepting_states": [
16 | "s0",
17 | "s2"
18 | ],
19 | "transitions": [
20 | ["s0","5c","s1"],
21 | ["s0","10c","s4"],
22 | ["s1","5c","s2"],
23 | ["s1","10c","s3"],
24 | ["s2","5c","s3"],
25 | ["s2","10c","s3"],
26 | ["s4","5c","s3"],
27 | ["s4","10c","s3"],
28 | ["s3","gum","s0"]
29 | ]
30 | }
--------------------------------------------------------------------------------
/tests/json/dfa/dfa_json_importer_01.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "5c",
4 | "10c",
5 | "gum"
6 | ],
7 | "states": [
8 | "s0",
9 | "s1",
10 | "s2",
11 | "s3",
12 | "s4"
13 | ],
14 | "initial_state": "s0",
15 | "accepting_states": [
16 | "s0",
17 | "s2"
18 | ],
19 | "transitions": [
20 | ["s0","5c","s1"],
21 | ["s0","10c","s4"],
22 | ["s1","5c","s2"],
23 | ["s1","10c","s3"],
24 | ["s2","5c","s3"],
25 | ["s2","10c","s3"],
26 | ["s4","5c","s3"],
27 | ["s4","10c","s3"],
28 | ["s3","gum","s0"]
29 | ]
30 | }
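This JSON layout, with transitions listed as [from, symbol, to] triples, is what dfa_json_importer() turns into the tuple-keyed dictionary used throughout the library, as the TestDfaJsonImporter case below checks against this very file. A minimal sketch of that mapping (assuming the repository root is the working directory):

    # Sketch only: the triple list above becomes a (state, symbol) -> state dict.
    from PySimpleAutomata import automata_IO

    dfa = automata_IO.dfa_json_importer(
        './tests/json/dfa/dfa_json_importer_01.json')
    assert dfa['initial_state'] == 's0'              # singular for a DFA
    assert dfa['transitions'][('s0', '5c')] == 's1'  # from ["s0", "5c", "s1"]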
--------------------------------------------------------------------------------
/tests/json/nfa/nfa_json_importer_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "alphabet": [
3 | "a",
4 | "b",
5 | "c"
6 | ],
7 | "states": [
8 | "a0",
9 | "t0",
10 | "t1",
11 | "t2",
12 | "t3",
13 | "t4"
14 | ],
15 | "initial_states": [
16 | "t0",
17 | "a0"
18 | ],
19 | "accepting_states": [
20 | "t0",
21 | "t4",
22 | "a0"
23 | ],
24 | "transitions": [
25 | ["t0","b","t1"],
26 | ["t0","a","t2"],
27 | ["t1","c","t3"],
28 | ["t1","c","t2"],
29 | ["t1","b","t4"],
30 | ["t2","b","t1"],
31 | ["t2","a","t2"],
32 | ["t2","a","t4"],
33 | ["t3","c","t0"],
34 | ["t3","b","t0"],
35 | ["t3","b","t3"],
36 | ["t3","a","t4"],
37 | ["t3","a","t1"],
38 | ["t4","a","t4"],
39 | ["t4","b","t0"],
40 | ["t4","c","t0"],
41 | ["a0","a","t1"]
42 | ]
43 | }
44 |
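The NFA flavour of the same format differs in two ways that the expected dictionaries in tests/test_automata_IO.py rely on: the key is the plural "initial_states" (a list), and repeated [from, symbol, to] triples for the same state/symbol pair collapse into a set of destinations. A short sketch, under the same working-directory assumption as above:

    # Sketch only: nfa_json_importer() merges repeated triples into sets.
    from PySimpleAutomata import automata_IO

    nfa = automata_IO.nfa_json_importer(
        './tests/json/nfa/nfa_json_importer_1.json')
    assert nfa['initial_states'] == {'t0', 'a0'}            # plural, set-valued
    assert nfa['transitions'][('t1', 'c')] == {'t3', 't2'}  # two triples merged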
--------------------------------------------------------------------------------
/tests/test_automata_IO.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | import unittest
3 | from .context import PySimpleAutomata
4 | from PySimpleAutomata import DFA
5 | from PySimpleAutomata import NFA
6 | from PySimpleAutomata import AFW
7 | from PySimpleAutomata import automata_IO
8 |
9 |
10 | ####################################################################
11 | # DFA ##############################################################
12 |
13 |
14 | class TestDfaDotImporter(TestCase):
15 | def setUp(self):
16 | self.maxDiff = None
17 | self.dfa_test = {
18 | 'alphabet': {'5c', '10c', 'gum'},
19 | 'states': {'s0', 's1', 's2', 's3'},
20 | 'initial_state': 's0',
21 | 'accepting_states': {'s0'},
22 | 'transitions': {('s0', '5c'): 's1',
23 | ('s0', '10c'): 's2',
24 | ('s1', '5c'): 's2',
25 | ('s1', '10c'): 's3',
26 | ('s2', '5c'): 's3',
27 | ('s2', '10c'): 's3',
28 | ('s3', 'gum'): 's0'}
29 | }
30 | self.dfa_test_02 = {
31 | 'alphabet': {'5c', '10c', 'gum'},
32 | 'states': {
33 | ('s3', 't2'),
34 | ('s3', 't3'),
35 | ('s0', 't3'),
36 | ('s2', 't3'),
37 | ('s2', 't0'),
38 | ('s1', 't2'),
39 | ('s0', 't0'),
40 | ('s1', 't4'),
41 | ('s0', 't1'),
42 | ('s0', 't5'),
43 | ('s2', 't1'),
44 | ('s2', 't5'),
45 | ('s3', 't4'),
46 | ('s3', 't0'),
47 | ('s0', 't2'),
48 | ('s2', 't2'),
49 | ('s1', 't0'),
50 | ('s1', 't3'),
51 | ('s1', 't5'),
52 | ('s3', 't1'),
53 | ('s0', 't4'),
54 | ('s2', 't4'),
55 | ('s3', 't5'),
56 | ('s1', 't1')
57 | },
58 | 'initial_state': ('s0', 't0'),
59 | 'accepting_states': {('s0', 't5'), ('s0', 't4')},
60 | 'transitions': {
61 | (('s3', 't3'), 'gum'): ('s0', 't1'),
62 | (('s0', 't1'), '10c'): ('s2', 't2'),
63 | (('s3', 't2'), 'gum'): ('s0', 't4'),
64 | (('s0', 't1'), '5c'): ('s1', 't5'),
65 | (('s2', 't1'), '10c'): ('s3', 't2'),
66 | (('s1', 't0'), '5c'): ('s2', 't1'),
67 | (('s1', 't1'), '10c'): ('s3', 't2'),
68 | (('s2', 't0'), '5c'): ('s3', 't1'),
69 | (('s0', 't2'), '5c'): ('s1', 't3'),
70 | (('s1', 't1'), '5c'): ('s2', 't5'),
71 | (('s3', 't5'), 'gum'): ('s0', 't0'),
72 | (('s1', 't2'), '5c'): ('s2', 't3'),
73 | (('s2', 't2'), '5c'): ('s3', 't3'),
74 | (('s2', 't1'), '5c'): ('s3', 't5'),
75 | (('s0', 't0'), '5c'): ('s1', 't1')
76 | }
77 | }
78 |
79 | def test_dfa_dot_importer(self):
80 | """ Tests importing a dfa from a simple dot file"""
81 | dfa_01 = automata_IO.dfa_dot_importer(
82 | './tests/dot/dfa/dfa_intersection_1_test_01.dot')
83 | self.assertDictEqual(dfa_01, self.dfa_test)
84 |
85 | def test_dfa_dot_importer_from_intersection(self):
86 | """ Tests importing a dfa from a dot file derived from an
87 | intersection """
88 | dfa_02 = automata_IO.dfa_dot_importer(
89 | './tests/dot/automata_io'
90 | '/automata_io_dfa_importing_intersection.dot')
91 | self.assertDictEqual(dfa_02, self.dfa_test_02)
92 |
93 | def test_dfa_dot_importer_no_state_only_transitions(self):
94 | """ WARNING! importing a .dot where no explicit state,
95 | but just transitions are present.
96 |
97 | This will NOT FAIL, but will recognize only transitions:
98 | states, initial states, accepting states will remain empty.
99 | """
100 |
101 | automata_IO.dfa_dot_importer(
102 | './tests/dot/automata_io'
103 | '/automata_io_dfa_importing_no_state.dot')
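        # Descriptive note (per the docstring above): the dict returned here
        # is expected to contain only the parsed transitions; 'states',
        # 'initial_state' and 'accepting_states' stay empty because the
        # fixture declares no nodes explicitly. The original test makes no
        # assertion on purpose.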
104 |
105 |
106 | class TestDfaToDot(TestCase):
107 | def setUp(self):
108 | self.maxDiff = None
109 | self.dfa_01 = automata_IO.dfa_dot_importer(
110 | './tests/dot/dfa/dfa_intersection_1_test_01.dot')
111 | self.dfa_02 = automata_IO.dfa_dot_importer(
112 | './tests/dot/dfa/dfa_intersection_2_test_01.dot')
113 | self.dfa_imported_intersect = automata_IO.dfa_dot_importer(
114 | './tests/dot/automata_io'
115 | '/automata_io_dfa_imported_intersection.dot')
116 | self.dfa_intersected = DFA.dfa_intersection(self.dfa_01,
117 | self.dfa_02)
118 |
119 | def test_dfa_to_dot(self):
120 |         """ Tests a simple dfa render through the graphviz library """
121 | automata_IO.dfa_to_dot(self.dfa_01,
122 | 'graphviz_dfa_render_test',
123 | 'tests/outputs')
124 |
125 | def test_dfa_graphviz_intersection_render(self):
126 | """ Tests a rendering of a dfa resulting from an
127 |         intersection, so consisting of more complex nodes """
128 | automata_IO.dfa_to_dot(self.dfa_intersected,
129 | 'graphviz_dfa_intersection_render_test',
130 | 'tests/outputs')
131 |
132 |
133 | class TestDfaJsonImporter(TestCase):
134 | def setUp(self):
135 | self.maxDiff = None
136 | self.dfa_01 = {
137 | "alphabet": {
138 | "5c",
139 | "10c",
140 | "gum"
141 | },
142 | "states": {
143 | "s0",
144 | "s1",
145 | "s2",
146 | "s3",
147 | "s4"
148 | },
149 | "initial_state": "s0",
150 | "accepting_states": {
151 | "s0",
152 | "s2"
153 | },
154 | "transitions": {
155 | ("s0", "5c"): "s1",
156 | ("s0", "10c"): "s4",
157 | ("s1", "5c"): "s2",
158 | ("s1", "10c"): "s3",
159 | ("s2", "5c"): "s3",
160 | ("s2", "10c"): "s3",
161 | ("s4", "5c"): "s3",
162 | ("s4", "10c"): "s3",
163 | ("s3", "gum"): "s0"
164 | }
165 | }
166 |
167 | def test_dfa_json_importer(self):
168 |         """ Tests a correct dfa import from a JSON file """
169 | self.assertDictEqual(automata_IO.dfa_json_importer(
170 | './tests/json/dfa/dfa_json_importer_01.json'),
171 | self.dfa_01)
172 |
173 |
174 | class TestDfaToJson(TestCase):
175 | def setUp(self):
176 | self.maxDiff = None
177 | self.dfa_01 = automata_IO.dfa_json_importer(
178 | './tests/json/dfa/dfa_export_to_json_1.json')
179 | self.dfa_02 = {
180 | "alphabet": {
181 | "5c",
182 | "10c",
183 | "gum"
184 | },
185 | "states": {
186 | "s0",
187 | "s1",
188 | "s2",
189 | "s3",
190 | "s4"
191 | },
192 | "initial_state": "s0",
193 | "accepting_states": {
194 | "s0",
195 | "s2"
196 | },
197 | "transitions": {
198 | "s0": 'a',
199 | "s1": 'a',
200 | "s2": 'a',
201 | "s3": 'a',
202 | "s4": 'a'
203 | }
204 | }
205 |
206 | def test_dfa_to_json(self):
207 |         """ Tests a correct export of a dfa to a JSON file. """
208 | name = 'JSON_test_dfa_1'
209 | automata_IO.dfa_to_json(self.dfa_01, name, 'tests/outputs')
210 | re_imported_dfa = automata_IO.dfa_json_importer(
211 | 'tests/outputs/' + name + '.json')
212 | self.assertDictEqual(self.dfa_01, re_imported_dfa)
213 |
214 | def test_dfa_to_json_undetected_wrong_transitions(self):
215 |         """ WARNING! Case where the dfa transitions are malformed but
216 |         the export still completes without raising any error. """
217 | name = 'JSON_test_dfa_2'
218 | automata_IO.dfa_to_json(self.dfa_02, name, 'tests/outputs')
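        # Descriptive note (per the docstring above): dfa_to_json() does not
        # validate the transition structure, so this malformed 'transitions'
        # mapping (plain state names as keys instead of (state, symbol)
        # tuples) is still written out without raising; checking the dict's
        # shape is left to the caller.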
219 |
220 |
221 | ####################################################################
222 | # NFA ##############################################################
223 |
224 | class TestNfaDotImporter(TestCase):
225 | def setUp(self):
226 | self.maxDiff = None
227 | self.nfa_test_01 = {
228 | 'alphabet': {'10c', '5c', 'gum'},
229 | 'states': {'s0', 's1', 's2', 's3'},
230 | 'initial_states': {'s0', 's3'},
231 | 'accepting_states': {'s0'},
232 | 'transitions': {
233 | ('s0', '10c'): {'s2'},
234 | ('s0', '5c'): {'s1', 's2'},
235 | ('s1', '10c'): {'s3'},
236 | ('s1', '5c'): {'s2', 's3'},
237 | ('s2', '10c'): {'s3'},
238 | ('s2', '5c'): {'s3'},
239 | ('s3', 'gum'): {'s0'}
240 | }
241 | }
242 | self.nfa_test_02 = {
243 | 'alphabet': {'5c', '10c', 'gum'},
244 | 'states': {
245 | ('c3', 't1'),
246 | ('s0', 't0'),
247 | ('c3', 'c1'),
248 | ('c1', 'c1'),
249 | ('c2', 'c2'),
250 | ('c4', 't3'),
251 | ('c4', 'c3'),
252 | ('c2', 't1'),
253 | ('c4', 't2'),
254 | ('s0', 't3'),
255 | ('c1', 'c4'),
256 | ('c2', 'c3'),
257 | ('c4', 'c4'),
258 | ('c2', 'c4'),
259 | ('c1', 't1'),
260 | ('s1', 'c2'),
261 | ('c1', 'c2'),
262 | ('s1', 't1'),
263 | ('s1', 't2'),
264 | ('c3', 't3'),
265 | ('c4', 'c2'),
266 | ('c3', 't2'),
267 | ('c2', 't2'),
268 | ('c4', 't1'),
269 | ('s0', 't1'),
270 | ('s0', 'c3'),
271 | ('s0', 't2'),
272 | ('s1', 'c4'),
273 | ('c2', 't3'),
274 | ('c2', 't0'),
275 | ('c4', 't0'),
276 | ('s0', 'c2'),
277 | ('c3', 'c4'),
278 | ('c1', 't0'),
279 | ('s0', 'c4'),
280 | ('c1', 't3'),
281 | ('s0', 'c1'),
282 | ('c1', 'c3'),
283 | ('c3', 't0'),
284 | ('s1', 't0'),
285 | ('c3', 'c2'),
286 | ('c4', 'c1'),
287 | ('c2', 'c1'),
288 | ('c1', 't2'),
289 | ('s1', 'c3'),
290 | ('s1', 't3'),
291 | ('s1', 'c1'),
292 | ('c3', 'c3')
293 | },
294 | 'initial_states': {('s0', 't0')},
295 | 'accepting_states': {('s1', 'c4'), ('c4', 'c4'),
296 | ('c4', 't3'), ('s1', 't3')},
297 | 'transitions': {
298 | (('c2', 'c2'), 'gum'): {('c4', 'c4')},
299 | (('c2', 't0'), '5c'): {('c3', 'c1')},
300 | (('s0', 't1'), 'gum'): {('s1', 't3')},
301 | (('c2', 'c2'), '5c'): {('c3', 'c3')},
302 | (('c3', 't2'), 'gum'): {('c1', 't0')},
303 | (('s0', 't1'), '5c'): {('c1', 't2')},
304 | (('c2', 't1'), 'gum'): {('c4', 't3')},
305 | (('c3', 't1'), 'gum'): {('c1', 't3')},
306 | (('s0', 'c2'), 'gum'): {('s1', 'c4')},
307 | (('c2', 't1'), '5c'): {('c3', 't2')},
308 | (('c1', 'c1'), '10c'): {('c2', 'c2')},
309 | (('s0', 't0'), '5c'): {('c1', 'c1')},
310 | (('c1', 't0'), '10c'): {('c2', 't1')},
311 | (('s0', 'c2'), '5c'): {('c1', 'c3')},
312 | (('s0', 't2'), 'gum'): {('s1', 't0')},
313 | (('c3', 'c3'), 'gum'): {('c1', 'c1')},
314 | (('c2', 'c3'), 'gum'): {('c4', 'c1')},
315 | (('c2', 't2'), 'gum'): {('c4', 't0')},
316 | (('c3', 'c2'), 'gum'): {('c1', 'c4')},
317 | (('s0', 'c3'), 'gum'): {('s1', 'c1')}
318 | }
319 | }
320 |
321 | def test_nfa_dot_importer(self):
322 |         """ Tests importing an nfa from a simple .dot file """
323 | nfa_01 = automata_IO.nfa_dot_importer(
324 | './tests/dot/automata_io'
325 | '/automata_io_nfa_dot_importer_test_01.dot')
326 | self.assertDictEqual(nfa_01, self.nfa_test_01)
327 |
328 | def test_nfa_dot_importer_intersection(self):
329 |         """ Tests importing an nfa from a dot file derived from an
330 | intersection """
331 | nfa_02 = automata_IO.nfa_dot_importer(
332 | './tests/dot/automata_io'
333 | '/automata_io_nfa_imported_intersection.dot')
334 | self.assertDictEqual(nfa_02, self.nfa_test_02)
335 |
336 | def test_nfa_dot_importer_from_simple_pydot_render(self):
337 |         """ Tests if an nfa imported from a dot file generated by
338 | nfa_pydot_render() is correct """
339 | nfa_01 = automata_IO.nfa_dot_importer(
340 | './tests/dot/automata_io'
341 | '/automata_io_nfa_importer_pydot_nfa_simple.dot')
342 | self.assertDictEqual(nfa_01, self.nfa_test_01)
343 |
344 |
345 | class TestNfaToDot(TestCase):
346 | def setUp(self):
347 | self.maxDiff = None
348 | self.nfa_test_01 = {
349 | 'alphabet': {'10c', '5c', 'gum'},
350 | 'states': {'s0', 's1', 's2', 's3'},
351 | 'initial_states': {'s0', 's3'},
352 | 'accepting_states': {'s0'},
353 | 'transitions': {
354 | ('s0', '10c'): {'s2'},
355 | ('s0', '5c'): {'s1', 's2'},
356 | ('s1', '10c'): {'s3'},
357 | ('s1', '5c'): {'s2', 's3'},
358 | ('s2', '10c'): {'s3'},
359 | ('s2', '5c'): {'s3'},
360 | ('s3', 'gum'): {'s0'}
361 | }
362 | }
363 | self.nfa_test_02 = {
364 | 'alphabet': {'5c', '10c', 'gum'},
365 | 'states': {
366 | ('c3', 't1'),
367 | ('s0', 't0'),
368 | ('c3', 'c1'),
369 | ('c1', 'c1'),
370 | ('c2', 'c2'),
371 | ('c4', 't3'),
372 | ('c4', 'c3'),
373 | ('c2', 't1'),
374 | ('c4', 't2'),
375 | ('s0', 't3'),
376 | ('c1', 'c4'),
377 | ('c2', 'c3'),
378 | ('c4', 'c4'),
379 | ('c2', 'c4'),
380 | ('c1', 't1'),
381 | ('s1', 'c2'),
382 | ('c1', 'c2'),
383 | ('s1', 't1'),
384 | ('s1', 't2'),
385 | ('c3', 't3'),
386 | ('c4', 'c2'),
387 | ('c3', 't2'),
388 | ('c2', 't2'),
389 | ('c4', 't1'),
390 | ('s0', 't1'),
391 | ('s0', 'c3'),
392 | ('s0', 't2'),
393 | ('s1', 'c4'),
394 | ('c2', 't3'),
395 | ('c2', 't0'),
396 | ('c4', 't0'),
397 | ('s0', 'c2'),
398 | ('c3', 'c4'),
399 | ('c1', 't0'),
400 | ('s0', 'c4'),
401 | ('c1', 't3'),
402 | ('s0', 'c1'),
403 | ('c1', 'c3'),
404 | ('c3', 't0'),
405 | ('s1', 't0'),
406 | ('c3', 'c2'),
407 | ('c4', 'c1'),
408 | ('c2', 'c1'),
409 | ('c1', 't2'),
410 | ('s1', 'c3'),
411 | ('s1', 't3'),
412 | ('s1', 'c1'),
413 | ('c3', 'c3')
414 | },
415 | 'initial_states': {('s0', 't0'), ('c1', 't3')},
416 | 'accepting_states': {('s1', 'c4'), ('c4', 'c4'),
417 | ('c4', 't3'), ('s1', 't3')},
418 | 'transitions': {
419 | (('c2', 'c2'), 'gum'): {('c4', 'c4')},
420 | (('c2', 't0'), '5c'): {('c3', 'c1')},
421 | (('s0', 't1'), 'gum'): {('s1', 't3')},
422 | (('c2', 'c2'), '5c'): {('c3', 'c3')},
423 | (('c3', 't2'), 'gum'): {('c1', 't0')},
424 | (('s0', 't1'), '5c'): {('c1', 't2')},
425 | (('c2', 't1'), 'gum'): {('c4', 't3')},
426 | (('c3', 't1'), 'gum'): {('c1', 't3')},
427 | (('s0', 'c2'), 'gum'): {('s1', 'c4')},
428 | (('c2', 't1'), '5c'): {('c3', 't2')},
429 | (('c1', 'c1'), '10c'): {('c2', 'c2')},
430 | (('s0', 't0'), '5c'): {('c1', 'c1')},
431 | (('c1', 't0'), '10c'): {('c2', 't1')},
432 | (('s0', 'c2'), '5c'): {('c1', 'c3')},
433 | (('s0', 't2'), 'gum'): {('s1', 't0')},
434 | (('c3', 'c3'), 'gum'): {('c1', 'c1')},
435 | (('c2', 'c3'), 'gum'): {('c4', 'c1')},
436 | (('c2', 't2'), 'gum'): {('c4', 't0')},
437 | (('c3', 'c2'), 'gum'): {('c1', 'c4')},
438 | (('s0', 'c3'), 'gum'): {('s1', 'c1')}
439 | }
440 | }
441 |
442 | def test_nfa_to_dot(self):
443 |         """ Tests a simple nfa rendering through the graphviz
444 | library"""
445 | automata_IO.nfa_to_dot(self.nfa_test_01,
446 | 'graphviz_nfa_simple',
447 | 'tests/outputs')
448 |
449 | def test_nfa_graphviz_intersection_render(self):
450 |         """ Tests rendering an nfa derived from an intersection
451 |         through the graphviz library """
452 | automata_IO.nfa_to_dot(self.nfa_test_02,
453 | 'graphviz_nfa_intersection',
454 | 'tests/outputs')
455 |
456 |
457 | class TestNfaJsonImporter(TestCase):
458 | def setUp(self):
459 | self.maxDiff = None
460 |         self.nfa_01 = {
461 | "alphabet": {
462 | "a",
463 | "b",
464 | "c"
465 | },
466 | "states": {
467 | "a0",
468 | "t0",
469 | "t1",
470 | "t2",
471 | "t3",
472 | "t4"
473 | },
474 | "initial_states": {
475 | "t0",
476 | "a0"
477 | },
478 | "accepting_states": {
479 | "t0",
480 | "t4",
481 | "a0"
482 | },
483 | "transitions": {
484 | ("t0", "b"): {"t1"},
485 | ("t0", "a"): {"t2"},
486 | ("t1", "c"): {"t3", "t2"},
487 | ("t1", "b"): {"t4"},
488 | ("t2", "b"): {"t1"},
489 | ("t2", "a"): {"t2", "t4"},
490 | ("t3", "c"): {"t0"},
491 | ("t3", "b"): {"t0", "t3"},
492 | ("t3", "a"): {"t4", "t1"},
493 | ("t4", "a"): {"t4"},
494 | ("t4", "b"): {"t0"},
495 | ("t4", "c"): {"t0"},
496 | ("a0", "a"): {"t1"}
497 | }
498 | }
499 |
500 | def test_nfa_json_importer(self):
501 | """ Tests a correct nfa import from a JSON file. """
502 | imported = automata_IO.nfa_json_importer(
503 | './tests/json/nfa/nfa_json_importer_1.json')
504 | self.assertDictEqual(imported,
505 |                              self.nfa_01)
506 |
507 |
508 | class TestNfaToJson(TestCase):
509 | def setUp(self):
510 | self.maxDiff = None
511 | self.nfa_01 = {
512 | "alphabet": {
513 | "a",
514 | "b",
515 | "c"
516 | },
517 | "states": {
518 | "a0",
519 | "t0",
520 | "t1",
521 | "t2",
522 | "t3",
523 | "t4"
524 | },
525 | "initial_states": {
526 | "t0",
527 | "a0"
528 | },
529 | "accepting_states": {
530 | "t0",
531 | "t4",
532 | "a0"
533 | },
534 | "transitions": {
535 | ("t0", "b"): {"t1"},
536 | ("t0", "a"): {"t2"},
537 | ("t1", "c"): {"t3", "t2"},
538 | ("t1", "b"): {"t4"},
539 | ("t2", "b"): {"t1"},
540 | ("t2", "a"): {"t2", "t4"},
541 | ("t3", "c"): {"t0"},
542 | ("t3", "b"): {"t0", "t3"},
543 | ("t3", "a"): {"t4", "t1"},
544 | ("t4", "a"): {"t4"},
545 | ("t4", "b"): {"t0"},
546 | ("t4", "c"): {"t0"},
547 | ("a0", "a"): {"t1"}
548 | }
549 | }
550 |
551 | def test_nfa_to_json(self):
552 |         """ Tests a correct export of an nfa to a JSON file. """
553 | name = 'JSON_test_nfa_1'
554 | automata_IO.nfa_to_json(self.nfa_01, name, 'tests/outputs')
555 | re_imported_nfa = automata_IO.nfa_json_importer(
556 | 'tests/outputs/' + name + '.json')
557 | self.assertDictEqual(self.nfa_01, re_imported_nfa)
558 |
559 |
560 | ####################################################################
561 | # AFW ##############################################################
562 |
563 |
564 | class TestAfwJsonImporter(TestCase):
565 | def setUp(self):
566 | self.maxDiff = None
567 | self.afw_test_01 = {
568 | 'alphabet': {'a', 'b'},
569 | 'states': {'s', 'q0', 'q1', 'q2'},
570 | 'initial_state': 's',
571 | 'accepting_states': {'q0'},
572 | 'transitions': {
573 | ('q0', 'b'): 'q0 or q2',
574 | ('q0', 'a'): 'q1',
575 | ('q1', 'a'): 'q0',
576 | ('q1', 'b'): 'q1 or q2',
577 | ('q2', 'a'): 'q2',
578 | ('s', 'a'): 's',
579 | ('s', 'b'): 's and q0'
580 | }
581 | }
582 | self.afw_test_empty = {
583 | 'alphabet': set(),
584 | 'states': set(),
585 | 'initial_state': 'state_0',
586 | 'accepting_states': set(),
587 | 'transitions': {}
588 | }
589 |
590 | def test_afw_json_importer(self):
591 |         """ Tests importing an afw from a .json file """
592 | afw_01 = automata_IO.afw_json_importer(
593 | './tests/json/automata_io'
594 | '/automata_io_afw_json_importer_test_01.json')
595 | self.assertDictEqual(afw_01, self.afw_test_01)
596 |
597 |
598 | class TestAfwToJson(TestCase):
599 | def setUp(self):
600 | self.maxDiff = None
601 | self.afw_test_01 = {
602 | 'alphabet': {'a', 'b'},
603 | 'states': {'s', 'q0', 'q1', 'q2'},
604 | 'initial_state': 's',
605 | 'accepting_states': {'q0'},
606 | 'transitions': {
607 | ('q0', 'b'): 'q0 or q2',
608 | ('q0', 'a'): 'q1',
609 | ('q1', 'a'): 'q0',
610 | ('q1', 'b'): 'q1 or q2',
611 | ('q2', 'a'): 'q2',
612 | ('s', 'a'): 's',
613 | ('s', 'b'): 's and q0'
614 | }
615 | }
616 | self.afw_test_empty = {
617 | 'alphabet': set(),
618 | 'states': set(),
619 | 'initial_state': 'state_0',
620 | 'accepting_states': set(),
621 | 'transitions': {}
622 | }
623 |
624 | def test_afw_to_json(self):
625 | """ Tests a correct afw export to JSON file """
626 | name = 'JSON_afw_export'
627 | automata_IO.afw_to_json(self.afw_test_01, name,
628 | 'tests/outputs')
629 | re_imported_afw = automata_IO.afw_json_importer(
630 | 'tests/outputs/' + name + '.json')
631 | self.assertDictEqual(self.afw_test_01, re_imported_afw)
632 |
--------------------------------------------------------------------------------