├── .gitignore ├── LICENSE ├── README.md ├── shared ├── __init__.py ├── data │ ├── __init__.py │ ├── easing.py │ ├── expression.py │ ├── simulators │ │ ├── __init__.py │ │ ├── drunk.py │ │ ├── mixins │ │ │ ├── __init__.py │ │ │ ├── easing.py │ │ │ ├── expression.py │ │ │ ├── graph.py │ │ │ ├── support.py │ │ │ └── tags.py │ │ └── process.py │ ├── stats.py │ ├── toml │ │ ├── __init__.py │ │ ├── _init.py │ │ ├── decoder.py │ │ ├── encoder.py │ │ ├── ordered.py │ │ └── tz.py │ └── yaml │ │ ├── __init__.py │ │ ├── composer.py │ │ ├── constructor.py │ │ ├── core.py │ │ ├── dumper.py │ │ ├── emitter.py │ │ ├── error.py │ │ ├── events.py │ │ ├── loader.py │ │ ├── nodes.py │ │ ├── parser.py │ │ ├── reader.py │ │ ├── representer.py │ │ ├── resolver.py │ │ ├── scanner.py │ │ ├── serializer.py │ │ └── tokens.py └── tools │ ├── __init__.py │ ├── compat.py │ ├── crowbar.py │ ├── data.py │ ├── debug │ ├── __init__.py │ ├── _test.py │ ├── _test_scenario.py │ ├── breakpoint.py │ ├── codecache.py │ ├── frame.py │ ├── hijack.py │ ├── proxy.py │ ├── snapshot.py │ ├── tracer.py │ └── trap.py │ ├── dictclass.py │ ├── differential.py │ ├── dump.py │ ├── enum.py │ ├── examples.py │ ├── expression.py │ ├── global.py │ ├── hotload.py │ ├── hotpatch.py │ ├── logging.py │ ├── macro.py │ ├── memoize.py │ ├── meta.py │ ├── monitor.py │ ├── net.py │ ├── overwatch.py │ ├── pretty.py │ ├── profile.py │ ├── runtime.py │ ├── sidecar.py │ ├── snapshot │ ├── __init__.py │ ├── engine.py │ ├── ia │ │ ├── __init__.py │ │ ├── global.py │ │ ├── project.py │ │ ├── tags.py │ │ ├── vision.py │ │ └── webdev.py │ ├── sepasoft │ │ ├── __init__.py │ │ ├── model.py │ │ └── webservices.py │ └── utils.py │ ├── thread.py │ ├── timing.py │ ├── trap.py │ ├── venv.py │ └── wrapped.py └── test └── shared └── tools ├── __init__.py ├── test_data.py ├── test_logging.py ├── test_meta.py ├── test_thread.py └── test_venv.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 
2 | *.class 3 | *.sublime-* 4 | **/.ipynb_checkpoints/* 5 | 6 | *.sublime-workspace 7 | *.sublime-project 8 | 9 | nodeenv 10 | 11 | # Byte-compiled / optimized / DLL files 12 | __pycache__/ 13 | *.py[cod] 14 | *$py.class 15 | 16 | # C extensions 17 | *.so 18 | 19 | # Distribution / packaging 20 | .Python 21 | build/ 22 | develop-eggs/ 23 | dist/ 24 | downloads/ 25 | eggs/ 26 | .eggs/ 27 | lib/ 28 | lib64/ 29 | parts/ 30 | sdist/ 31 | var/ 32 | wheels/ 33 | pip-wheel-metadata/ 34 | share/python-wheels/ 35 | *.egg-info/ 36 | .installed.cfg 37 | *.egg 38 | MANIFEST 39 | 40 | # PyInstaller 41 | # Usually these files are written by a python script from a template 42 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 43 | *.manifest 44 | *.spec 45 | 46 | # Installer logs 47 | pip-log.txt 48 | pip-delete-this-directory.txt 49 | 50 | # Unit test / coverage reports 51 | htmlcov/ 52 | .tox/ 53 | .nox/ 54 | .coverage 55 | .coverage.* 56 | .cache 57 | nosetests.xml 58 | coverage.xml 59 | *.cover 60 | *.py,cover 61 | .hypothesis/ 62 | .pytest_cache/ 63 | 64 | # Translations 65 | *.mo 66 | *.pot 67 | 68 | # Django stuff: 69 | *.log 70 | local_settings.py 71 | db.sqlite3 72 | db.sqlite3-journal 73 | 74 | # Flask stuff: 75 | instance/ 76 | .webassets-cache 77 | 78 | # Scrapy stuff: 79 | .scrapy 80 | 81 | # Sphinx documentation 82 | docs/_build/ 83 | 84 | # PyBuilder 85 | target/ 86 | 87 | # Jupyter Notebook 88 | .ipynb_checkpoints 89 | 90 | # IPython 91 | profile_default/ 92 | ipython_config.py 93 | 94 | # pyenv 95 | .python-version 96 | 97 | # pipenv 98 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 99 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 100 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 101 | # install all needed dependencies. 
102 | #Pipfile.lock 103 | 104 | # celery beat schedule file 105 | celerybeat-schedule 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ -------------------------------------------------------------------------------- /shared/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/__init__.py -------------------------------------------------------------------------------- /shared/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/data/__init__.py -------------------------------------------------------------------------------- /shared/data/easing.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python port of the easings from the excellent resource https://github.com/ai/easings.net 3 | URL: https://easings.net 4 | 5 | Some minor additions and wrappers made for convenience. 
6 | """ 7 | 8 | 9 | import math 10 | 11 | try: 12 | from itertools import permutations 13 | except ImportError: 14 | from shared.tools.compat import permutations 15 | 16 | from shared.tools.enum import Enum 17 | 18 | class DIRECTION(Enum): 19 | IN = 'in' 20 | OUT = 'out' 21 | IN_OUT = 'in_out' 22 | 23 | class ALGORITHM(Enum): 24 | 25 | QUADRATIC = 'quad' # Quadratic - squared 26 | CUBIC = 'cubic' # Cubic - x ^ third 27 | QUARTIC = 'quart' # Quartic - x to the four 28 | QUINTIC = 'quint' # Quintic - x's fifth power 29 | 30 | LINEAR = 'linear' 31 | SINE = 'sine' 32 | EXPONENT = 'expo' 33 | CIRCULAR = 'circ' 34 | BACK = 'back' 35 | ELASTIC = 'elastic' 36 | 37 | 38 | __license__ = 'GPLv3' 39 | 40 | # Modifications and additions by: 41 | __maintainer__ = 'Andrew Geiger' 42 | __email__ = 'andrew.geiger@corsosystems.com' 43 | 44 | 45 | class MetaEaseFunctions(type): 46 | 47 | def __init__(cls, clsname, bases, attributes): 48 | 49 | for power, algo in enumerate(cls._algebraic): 50 | for direction in cls._directions: 51 | powerFunction = getattr(cls, '%s_power' % direction) 52 | def powerClosure(cls, x, p=power+2, pfun=powerFunction): 53 | return pfun(x,p) 54 | setattr(cls, '%s_%s' % (direction, algo), classmethod(powerClosure)) 55 | 56 | for firstList,secondList in permutations([cls._directions,cls._algos]): 57 | for first in firstList: 58 | 59 | class EaseChain(object): 60 | pass 61 | 62 | for second in secondList: 63 | try: 64 | function = getattr(cls, '%s_%s' % (first, second)) 65 | except AttributeError: 66 | function = getattr(cls, '%s_%s' % (second,first)) 67 | 68 | @classmethod 69 | def closure(_, x, fun=function): 70 | return fun(x) 71 | 72 | setattr(EaseChain, second.upper(), closure) 73 | 74 | setattr(cls, first.upper(), EaseChain) 75 | 76 | return super(MetaEaseFunctions, cls).__init__(clsname, bases, attributes) 77 | 78 | 79 | 80 | class EaseFunctions(object): 81 | """An easing object. 
82 | Transliterated from 83 | https://github.com/ai/easings.net/blob/master/src/easings/easingsFunctions.ts 84 | """ 85 | __metaclass__ = MetaEaseFunctions 86 | 87 | _algebraic = ('quad', 'cubic', 'quart', 'quint') 88 | _algos = _algebraic + ('linear', 'sine', 'expo', 'circ', 'back', 'elastic') 89 | _directions = ('in', 'out', 'in_out') 90 | 91 | 92 | _c1 = 1.70158 93 | _c2 = _c1 * 1.525 94 | _c3 = _c1 + 1 95 | _c4 = (2 * math.pi) / 3 96 | _c5 = (2 * math.pi) / 4.5 97 | 98 | 99 | @classmethod 100 | def in_linear(cls, x): 101 | return x 102 | 103 | @classmethod 104 | def out_linear(cls, x): 105 | return x 106 | 107 | @classmethod 108 | def in_out_linear(cls, x): 109 | return x 110 | 111 | 112 | @classmethod 113 | def in_power(cls, x, power): 114 | return math.pow(x, power) 115 | 116 | @classmethod 117 | def out_power(cls, x, power): 118 | return 1 - math.pow(1-x, power) 119 | 120 | @classmethod 121 | def in_out_power(cls, x, power): 122 | if x < 0.5: 123 | return 2*(power-1)*math.pow(x,power) 124 | else: 125 | return 1 - math.pow(-2*x + 2, power) / 2.0 126 | 127 | 128 | @classmethod 129 | def in_sine(cls, x): 130 | return 1 - math.cos((x*math.pi)/2.0) 131 | 132 | @classmethod 133 | def out_sine(cls, x): 134 | return math.sin((x*math.pi)/2.0) 135 | 136 | @classmethod 137 | def in_out_sine(cls, x): 138 | return -(math.cos(x*math.pi) - 1)/2.0 139 | 140 | 141 | @classmethod 142 | def in_expo(cls, x): 143 | if x == 0: 144 | return 0 145 | return math.pow(2, 10*x - 10) 146 | 147 | @classmethod 148 | def out_expo(cls, x): 149 | if x == 1: 150 | return 1 151 | return 1 - math.pow(2, -10*x) 152 | 153 | @classmethod 154 | def in_out_expo(cls, x): 155 | if x == 0: 156 | return 0 157 | if x == 1: 158 | return 1 159 | if x < 0.5: 160 | return math.pow(2, 20*x - 10)/2.0 161 | else: 162 | return (2 - math.pow(2, -20*x + 10))/2.0 163 | 164 | 165 | @classmethod 166 | def in_circ(cls, x): 167 | return 1 - math.sqrt(1 - math.pow(x, 2)) 168 | 169 | @classmethod 170 | def out_circ(cls, 
x): 171 | return math.sqrt(1 - math.pow(x - 1, 2)) 172 | 173 | @classmethod 174 | def in_out_circ(cls, x): 175 | if x < 0.5: 176 | return (1 - math.sqrt(1 - math.pow(2*x, 2)))/2.0 177 | else: 178 | return (math.sqrt(1 - math.pow(-2*x + 2, 2)) + 1)/2.0 179 | 180 | 181 | @classmethod 182 | def in_back(cls, x): 183 | return cls._c3*math.pow(x, 3) - cls._c1*math.pow(x, 2) 184 | 185 | @classmethod 186 | def out_back(cls, x): 187 | return 1 + cls._c3*math.pow(x-1, 3) + cls._c1*math.pow(x-1, 2) 188 | 189 | @classmethod 190 | def in_out_back(cls, x): 191 | if x < 0.5: 192 | return (math.pow(2*x, 2)*((cls._c2 + 1)*2*x - cls._c2))/2.0 193 | else: 194 | return (math.pow(2*x - 2, 2)*((cls._c2 + 1)*(x*2 - 2) + cls._c2) + 2)/2.0 195 | 196 | 197 | @classmethod 198 | def in_elastic(cls, x): 199 | if x == 0: 200 | return 0 201 | if x == 1: 202 | return 1 203 | return -math.pow(2, 10*x - 10) * math.sin((x*10 - 10.75) * cls._c4) 204 | 205 | @classmethod 206 | def out_elastic(cls, x): 207 | if x == 0: 208 | return 0 209 | if x == 1: 210 | return 1 211 | return math.pow(2, -10*x) * math.sin((x*10 - 0.75) * cls._c4) + 1 212 | 213 | 214 | @classmethod 215 | def in_out_elastic(cls, x): 216 | if x == 0: 217 | return 0 218 | if x == 1: 219 | return 1 220 | if x < 0.5: 221 | return -(math.pow(2, 20*x - 10) * math.sin((20*x - 11.125) * cls._c5))/2.0 222 | else: 223 | return (math.pow(2, -20*x + 10) * math.sin((20*x - 11.125) * cls._c5))/2.0 + 1 224 | 225 | 226 | @classmethod 227 | def in_bounce(cls, x): 228 | return 1 - cls.out_bounce(1-x) 229 | 230 | @classmethod 231 | def out_bounce(cls, x): 232 | n = 7.5625 233 | d = 2.75 234 | 235 | if x < (1.0/d): 236 | c = 0 237 | 238 | elif x < (2.0/d): 239 | x -= 1.5/d 240 | c = 0.75 241 | 242 | elif x < (2.5/d): 243 | x -= 2.25/d 244 | c = 0.9375 245 | 246 | else: 247 | x -= 2.625/d 248 | c = 0.984375 249 | 250 | return n1 * x * x + c 251 | 252 | @classmethod 253 | def in_out_bounce(cls, x): 254 | if x < 0.5: 255 | return (1 - cls.out_bounce(1 - 
2*x))/2.0 256 | else: 257 | return (1 + cls.out_bounce(2*x - 1))/2.0 258 | 259 | 260 | class Easing(object): 261 | __slots__ = ('function', 262 | 'start', 'finish', 263 | 'time_start', 'time_end', 'steps') 264 | 265 | def __init__(self, 266 | ease_type=ALGORITHM.LINEAR, 267 | direction=DIRECTION.IN, 268 | start=0.0, 269 | finish=1.0, 270 | steps=None, 271 | time_start=0.0, 272 | time_end=None, 273 | duration=None, 274 | ): 275 | """ 276 | Ease from start to finish. 277 | 278 | If steps are an integer, it's assumed to iterate that many times. 279 | If steps are a float, it's assumed the steps run until time_end. 280 | If the steps are undefined, it's assumed the param is in relation 281 | to time_start and time_end. 282 | 283 | Remember the fencepost problem: steps are the fence, not the posts. 284 | You start at the beginning - the first step is the first increment. 285 | Thus for steps=10, the iterable will yield 11 times! 286 | """ 287 | 288 | self.function = getattr(EaseFunctions, '%s_%s' % (direction, ease_type)) 289 | 290 | self.start = start * 1.0 291 | self.finish = finish * 1.0 292 | 293 | if duration and not time_end: 294 | time_end = time_start + duration 295 | 296 | assert time_start < time_end, "Time must flow forward (though scale may not)" 297 | self.time_start = time_start * 1.0 298 | self.time_end = time_end * 1.0 299 | self.steps = steps or None 300 | 301 | @property 302 | def step_by_count(self): 303 | return self.steps and isinstance(self.steps, (int, long)) 304 | @property 305 | def step_by_increment(self): 306 | return self.steps and isinstance(self.steps, (float,)) 307 | 308 | @property 309 | def span(self): 310 | return self.finish - self.start 311 | 312 | @property 313 | def time_span(self): 314 | return self.time_end - self.time_start 315 | 316 | @property 317 | def time_bounds(self): 318 | return slice(self.time_start, self.time_end, self.steps) 319 | 320 | 321 | def normalize_time(self, t): 322 | if self.steps: 323 | if 
self.step_by_count: 324 | if t <= 0: 325 | return 0.0 326 | elif t >= self.steps: 327 | return 1.0 328 | else: 329 | return (t * 1.0) / self.steps 330 | elif self.step_by_increment: 331 | total_inc = (t * 1.0) * self.steps 332 | if total_inc <= 0: 333 | return 0.0 334 | elif total_inc >= self.time_span: 335 | return 1.0 336 | else: 337 | return (total_inc / self.time_span) 338 | else: 339 | if t <= self.time_start: 340 | return 0.0 341 | elif t >= self.time_end: 342 | return 1.0 343 | return (t - self.time_start) / self.time_span 344 | 345 | def interpolate_scale(self, fraction): 346 | return self.start + (fraction * (self.span)) 347 | 348 | 349 | @property 350 | def scale_bounds(self): 351 | return slice(self.start, self.finish) 352 | 353 | def __call__(self, t): 354 | t_norm = self.normalize_time(t) 355 | y_norm = self.function(t_norm) 356 | return self.interpolate_scale(y_norm) 357 | 358 | def __iter__(self): 359 | assert self.steps, "Cannot iterate without a steps defined." 360 | if self.step_by_count: 361 | for i in range(self.steps): 362 | yield i, self(i) 363 | else: 364 | yield self.steps, self.finish 365 | elif self.step_by_increment: 366 | for t in range(int(self.time_span/self.steps)): 367 | yield (t * self.steps) + self.time_start, self(t) 368 | else: 369 | if (t * self.steps) + self.time_start < self.time_end: 370 | yield self.time_end, self.finish 371 | 372 | -------------------------------------------------------------------------------- /shared/data/simulators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/data/simulators/__init__.py -------------------------------------------------------------------------------- /shared/data/simulators/drunk.py: -------------------------------------------------------------------------------- 1 | import random 2 | 3 | class DrunkenWalk(object): 4 | """A random walker 
that can get progressively more or less random over time.""" 5 | 6 | _reasonable = dict( 7 | money=100.0, 8 | tolerance=3.0, # ABV div 3, soberiety gained per recoveryRate steps 9 | alcoholism=100, 10 | recoveryRate=1000.0 11 | ) 12 | 13 | _maxBounce = 0.2 14 | _antiTopple = (0.0, 0.0) # (0.2,0.8) 15 | 16 | def __init__(self, initValue=0, inebriation=0.12, 17 | money=None, tolerance=None, alcoholism=None, recoveryRate=None, 18 | handrails=(0,10.0), leaning=None, stride=None, 19 | boozeMenu=[(10,0.12)]): 20 | """Perform a random walk. The more inebriated, the more the value wanders. 21 | 22 | If money is given, more booze will be bought and indebriation gets worse each step. 23 | Inebriation goes down each step depending on the tolerance. 24 | 25 | Alcoholism determines how many steps before another drink it purchased. 26 | """ 27 | self.value = initValue 28 | self.inebriation = inebriation 29 | self.money = money or 0 30 | self.alcoholism = alcoholism or 0 31 | self.tolerance = tolerance or 0 32 | self.recoveryRate = recoveryRate or 0 33 | if self.tolerance and not self.recoveryRate: 34 | self.recoveryRate = self._reasonable['recoveryRate'] 35 | 36 | self.leaning = leaning or 0.5 37 | self.stride = stride or 2 38 | 39 | self.boozeMenu = boozeMenu 40 | self.steps = 0 41 | self.handrails = handrails 42 | 43 | 44 | def stumble(self): 45 | 46 | self.steps += 1 47 | 48 | if self.alcoholism and (self.steps % self.alcoholism == 0): 49 | self.drink() 50 | if self.tolerance: 51 | self.inebriation -= self.tolerance / self.recoveryRate 52 | self.inebriation = max((0, self.inebriation)) 53 | 54 | self.leaning += (self.leaning * 0.1) 55 | if self._antiTopple: 56 | left,right = self._antiTopple 57 | if self.leaning > right: 58 | self.leaning = right 59 | elif self.leaning < left: 60 | self.leaning = left 61 | 62 | self.value += (random.random()-(self.leaning+0.5))*self.inebriation*self.stride 63 | 64 | if self.handrails: 65 | left,right = self.handrails 66 | if self.value < 
left: 67 | self.value = left 68 | self.leaning = 0.0 + abs(self.leaning/2) 69 | elif self.value > right: 70 | self.value = right 71 | self.leaning = 0.0 - abs(self.leaning/2) 72 | 73 | return self.value 74 | 75 | 76 | def traipse(self, steps=None): 77 | for step in range(steps or self.alcoholism): 78 | _ = self.stumble() 79 | return self.value 80 | 81 | 82 | def drink(self): 83 | if self.money: 84 | cost,abv = random.choice(abv for cost,abv in self.boozeMenu if cost <=self.money) 85 | self.money -= cost 86 | self.inebriation += abv/(self.tolerance) -------------------------------------------------------------------------------- /shared/data/simulators/mixins/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/data/simulators/mixins/__init__.py -------------------------------------------------------------------------------- /shared/data/simulators/mixins/easing.py: -------------------------------------------------------------------------------- 1 | from shared.data.simulators.mixins.support import MixinFunctionSupport 2 | from shared.data.easing import Easing 3 | 4 | 5 | class EasingMixin(MixinFunctionSupport): 6 | 7 | # Required overrides for mixin functions 8 | 9 | def _configure_default_(self, variable): 10 | return { 11 | 'start': self._variables[variable], 12 | 'time_start': self._variables.get(self._escapement_variable, self._DEFAULT_START_VALUE), 13 | } 14 | 15 | def _configure_function_(self, **configuration): 16 | return Easing(**configuration) 17 | -------------------------------------------------------------------------------- /shared/data/simulators/mixins/expression.py: -------------------------------------------------------------------------------- 1 | from shared.data.simulators.mixins.support import MixinFunctionSupport 2 | from shared.data.expression import Expression 3 | 4 | 5 | class 
ExpressionMixin(MixinFunctionSupport): 6 | 7 | # Required overrides for mixin functions 8 | 9 | def _configure_default_(self, variable): 10 | return {} 11 | 12 | def _configure_function_(self, expression): 13 | return Expression(expression) 14 | 15 | # Additional overrides to intercept configuration 16 | 17 | def _resolve_variable_definition(self, variable_definition): 18 | if isinstance(variable_definition, (str, unicode)): 19 | return { 20 | 'kind': 'Expression', 21 | 'config': { 22 | 'expression': variable_definition 23 | } 24 | } 25 | 26 | return super(ExpressionMixin, self)._resolve_variable_definition(variable_definition) 27 | 28 | 29 | def _resolve_arguments(self, some_callable): 30 | if isinstance(some_callable, Expression): 31 | return some_callable._fields 32 | return super(ExpressionMixin, self)._resolve_arguments(some_callable) 33 | 34 | 35 | def _initialize_conditional(self, conditional): 36 | if isinstance(conditional, (str, unicode)): 37 | return self._close_function(Expression(conditional)) 38 | 39 | return super(ExpressionMixin, self)._initialize_conditional(conditional) -------------------------------------------------------------------------------- /shared/data/simulators/mixins/graph.py: -------------------------------------------------------------------------------- 1 | from transitions.extensions import GraphMachine 2 | 3 | 4 | class GraphMixin(GraphMachine): 5 | 6 | def _init_graphviz_engine(self, use_pygraphviz): 7 | 8 | Graph = super(GraphMixin, self)._init_graphviz_engine(use_pygraphviz) 9 | 10 | class TweakedGraph(Graph): 11 | 12 | _TRANSITION_CHECK = self._TRANSITION_CHECK 13 | 14 | def _transition_label(self, tran): 15 | if tran.get('trigger') == self._TRANSITION_CHECK: 16 | return '' 17 | else: 18 | return super(TweakedGraph, self)._transition_label(tran) 19 | 20 | return TweakedGraph 21 | 22 | 23 | -------------------------------------------------------------------------------- /shared/data/simulators/mixins/support.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Mixin function template and support for Process simulation 3 | """ 4 | 5 | 6 | class MetaFunctionMixin(type): 7 | 8 | _meta_config_methods = ('_configure_default_', 9 | '_configure_function_', 10 | ) 11 | 12 | def __new__(cls, clsname, bases, attributes): 13 | 14 | if any(( 15 | clsname == 'MixinFunctionSupport', 16 | not clsname.endswith('Mixin'), 17 | )): 18 | return super(MetaFunctionMixin, cls).__new__(cls, clsname, bases, attributes) 19 | 20 | support_name = clsname[:-len('Mixin')] # clsname.partition('_')[2] 21 | 22 | for method_name in cls._meta_config_methods: 23 | 24 | specialized_method_name = '%s%s' % (method_name, support_name) 25 | if method_name in attributes: 26 | method = attributes.pop(method_name) 27 | else: 28 | raise NotImplementedError('MixinFunctionSupport subclasses must at least cover the default config methods: %r' % cls._meta_config_methods) 29 | attributes[specialized_method_name] = method 30 | 31 | return super(MetaFunctionMixin, cls).__new__(cls, clsname, bases, attributes) 32 | 33 | 34 | class MixinFunctionSupport(object): 35 | __metaclass__ = MetaFunctionMixin 36 | 37 | def _configure_default_(self, variable): 38 | # by default pass along enough info to come up with something useful 39 | return dict(variable=variable, value=self._DEFAULT_START_VALUE) 40 | 41 | def _configure_function_(self, **configuration): 42 | if 'hold' in configuration: 43 | def hold_value(self=self, variable=configuration['variable']): 44 | return self._variables[variable] 45 | return hold_value 46 | elif 'value' in configuration: 47 | def static_value(self=self, value=configuration['value']): 48 | return value 49 | return static_value 50 | elif 'function' in configuration: 51 | return configuration['function'] 52 | raise NotImplementedError('Mixins should explicitly configure functions.') 53 | 54 | def _resolve_arguments(self, some_callable): 55 | return 
super(MixinFunctionSupport, self)._resolve_arguments(some_callable) 56 | 57 | def _resolve_variable_definition(self, variable_definition): 58 | return super(MixinFunctionSupport, self)._resolve_variable_definition(variable_definition) 59 | -------------------------------------------------------------------------------- /shared/data/simulators/mixins/tags.py: -------------------------------------------------------------------------------- 1 | import re 2 | from java.util import Date 3 | 4 | from shared.tools.enum import Enum 5 | 6 | class TAG_OVERWRITE_POLICY(Enum): 7 | ABORT = 'a' 8 | OVERWRITE = 'o' 9 | IGNORE = 'i' 10 | MERGE = 'm' 11 | 12 | 13 | def mask_dict(default, overrides, **kwarg_overrides): 14 | return dict((key,kwarg_overrides.get(key, 15 | overrides.get(key, 16 | default.get(key, KeyError)))) 17 | for key 18 | in set(kwarg_overrides.keys() 19 | + overrides.keys() 20 | + default.keys()) ) 21 | 22 | 23 | class TagsMixin(object): 24 | 25 | _TAG_TYPE_MAP = { 26 | str: 'String', 27 | int: 'Int4', 28 | float: 'Float8', 29 | bool: 'Boolean', 30 | Date: 'DateTime', 31 | } 32 | 33 | #https://regex101.com/r/ogqErX/4 34 | _TAG_PATTERN = re.compile(r""" 35 | ^ (?P 36 | # If a tag provider is given, match that first 37 | (\[(?P[a-z0-9_\- ]+)\])? 38 | # Capture the full path as well 39 | (?P 40 | # Everything after the provider is a parent path 41 | # After the parent path, no forward slashes can be 42 | # matched, so the negative lookahead assures that. 43 | ((?P.*?)[/\\])?(?![/\\]) 44 | # The tag name won't have a forward slash, and if 45 | # missing we'll check that it's a folder 46 | (?P[a-z0-9_\- ]+)?) 
47 | ) $ 48 | """, re.X + re.I) 49 | 50 | 51 | def __init__(self, tags=None, **configuration): 52 | 53 | assert 'folder' in tags, 'Tag folder for variables needed in config' 54 | 55 | self._tag_definitions = mask_dict({ 56 | 'collision policy': TAG_OVERWRITE_POLICY.OVERWRITE, 57 | 'resume': True, # instead of clearing out, load back in, if possible 58 | }, tags or {}) 59 | 60 | super(TagsMixin, self).__init__(**configuration) 61 | 62 | self._initialize_tags() 63 | 64 | 65 | def _initialize_tags(self): 66 | 67 | def override_tag_config(tag_name, configuration=self._tag_definitions['configuration']): 68 | return mask_dict( 69 | configuration.get('_default', {}), 70 | configuration.get(tag_name, {}) 71 | ) 72 | 73 | def check_init_value(self, variable_name, default): 74 | if self._tag_definitions['resume']: 75 | if system.tag.exists(self._tag_definitions['folder'] + '/' + variable_name): 76 | value = system.tag.read(self._tag_definitions['folder'] + '/' + variable_name).value 77 | self._variable[variable_name] = value 78 | return value 79 | return default 80 | 81 | root_parts = { 82 | 'provider': 'default', 83 | 'parent': '' 84 | } 85 | 86 | root_parts.update( 87 | dict((k,v) 88 | for k,v 89 | in self._TAG_PATTERN.match(self._tag_definitions['folder']).groupdict().items() 90 | if v)) 91 | 92 | tag_definitions = { 93 | 'tagType': 'Folder', 94 | 'name': root_parts['name'], 95 | 'tags': [], 96 | } 97 | 98 | if self._raw_definition: 99 | tag_definitions['tags'].append(mask_dict({ 100 | 'name': '_definition_', 101 | 'tagType': 'AtomicTag', 102 | 'valueSource': 'memory', 103 | 'dataType': 'String', 104 | 'value': self._raw_definition, 105 | }, override_tag_config('_definition_'))) 106 | 107 | # treat state as a special case 108 | if self._tag_definitions['resume']: 109 | if system.tag.exists(self._tag_definitions['folder'] + '/' + 'state'): 110 | self.state = system.tag.read(self._tag_definitions['folder'] + '/' + 'state').value 111 | 112 | 
tag_definitions['tags'].append(mask_dict({ 113 | 'name': 'state', 114 | 'tagType': 'AtomicTag', 115 | 'valueSource': 'memory', 116 | 'dataType': 'String', 117 | 'value': self.state, 118 | }, override_tag_config('state'))) 119 | 120 | for variable, value in self._variables.items(): 121 | if self._tag_definitions['resume']: 122 | if system.tag.exists(self._tag_definitions['folder'] + '/' + variable): 123 | value = self._variables[variable] = system.tag.read(self._tag_definitions['folder'] + '/' + variable).value 124 | 125 | tag_definitions['tags'].append(mask_dict({ 126 | 'name': variable, 127 | 'tagType': 'AtomicTag', 128 | 'valueSource': 'memory', 129 | 'dataType': self._TAG_TYPE_MAP[type(value)], 130 | 'value': value, 131 | }, override_tag_config(variable))) 132 | 133 | system.tag.configure( 134 | basePath = '[%(provider)s]%(parent)s' % root_parts, 135 | tags = tag_definitions, 136 | collisionPolicy = self._tag_definitions['collision policy']) 137 | 138 | # fully qualified path to folder 139 | self._tag_folder = '[%(provider)s]%(parent)s/%(name)s' % root_parts 140 | 141 | 142 | def step(self): 143 | variable_names = sorted(self._variables) 144 | tag_paths = ['%s/%s' % (self._tag_folder, variable) for variable in variable_names] 145 | tag_paths.append('%s/state' % self._tag_folder) 146 | 147 | if self._tag_definitions['resume']: 148 | values = [qv.value for qv in 149 | system.tag.readBlocking(tag_paths)] 150 | 151 | new_state = values.pop(-1) 152 | if new_state != self.state: 153 | self.state = new_state 154 | 155 | for value, variable in zip(values, variable_names): 156 | self._variables[variable] = value 157 | 158 | super(TagsMixin, self).step() 159 | 160 | values = [value for _,value in sorted(self._variables.items())] 161 | values.append(self.state) 162 | 163 | system.tag.writeAll(tag_paths, values) 164 | -------------------------------------------------------------------------------- /shared/data/toml/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/data/toml/__init__.py -------------------------------------------------------------------------------- /shared/data/toml/_init.py: -------------------------------------------------------------------------------- 1 | """Python module which parses and emits TOML. 2 | 3 | Released under the MIT license. 4 | 5 | For normal Ignition 8 support, just use 6 | 7 | from shared.data.toml._init import * 8 | """ 9 | 10 | from shared.data.toml.decoder import load, loads, TomlDecoder 11 | from shared.data.toml.encoder import dump, dumps, TomlEncoder 12 | 13 | from shared.data.toml.decoder import TomlDecodeError, TomlPreserveCommentDecoder 14 | from shared.data.toml.encoder import TomlArraySeparatorEncoder, TomlPreserveInlineDictEncoder, TomlNumpyEncoder, TomlPreserveCommentEncoder, TomlPathlibEncoder 15 | 16 | __version__ = "0.10.2" 17 | _spec_ = "0.5.0" 18 | -------------------------------------------------------------------------------- /shared/data/toml/ordered.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | from shared.data.toml.encoder import TomlEncoder 3 | from shared.data.toml.decoder import TomlDecoder 4 | 5 | 6 | class TomlOrderedDecoder(TomlDecoder): 7 | 8 | def __init__(self): 9 | super(self.__class__, self).__init__(_dict=OrderedDict) 10 | 11 | 12 | class TomlOrderedEncoder(TomlEncoder): 13 | 14 | def __init__(self): 15 | super(self.__class__, self).__init__(_dict=OrderedDict) 16 | -------------------------------------------------------------------------------- /shared/data/toml/tz.py: -------------------------------------------------------------------------------- 1 | from datetime import tzinfo, timedelta 2 | 3 | 4 | class TomlTz(tzinfo): 5 | def __init__(self, toml_offset): 6 | if toml_offset == "Z": 7 
| self._raw_offset = "+00:00" 8 | else: 9 | self._raw_offset = toml_offset 10 | self._sign = -1 if self._raw_offset[0] == '-' else 1 11 | self._hours = int(self._raw_offset[1:3]) 12 | self._minutes = int(self._raw_offset[4:6]) 13 | 14 | def __deepcopy__(self, memo): 15 | return self.__class__(self._raw_offset) 16 | 17 | def tzname(self, dt): 18 | return "UTC" + self._raw_offset 19 | 20 | def utcoffset(self, dt): 21 | return self._sign * timedelta(hours=self._hours, minutes=self._minutes) 22 | 23 | def dst(self, dt): 24 | return timedelta(0) 25 | -------------------------------------------------------------------------------- /shared/data/yaml/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/data/yaml/__init__.py -------------------------------------------------------------------------------- /shared/data/yaml/composer.py: -------------------------------------------------------------------------------- 1 | # PyYAML library 2 | __license__ = 'MIT' 3 | __author__ = 'Kirill Simonov' 4 | __copyright__ = """ 5 | Copyright (c) 2017-2020 Ingy döt Net 6 | Copyright (c) 2006-2016 Kirill Simonov 7 | """ 8 | 9 | # For changes regarding this port for Ignition usage, please contact: 10 | __maintainer__ = 'Andrew Geiger' 11 | __email__ = 'andrew.geiger@corsosystems.com' 12 | 13 | 14 | __all__ = ['Composer', 'ComposerError'] 15 | 16 | from shared.data.yaml.error import MarkedYAMLError 17 | from shared.data.yaml.events import * 18 | from shared.data.yaml.nodes import * 19 | 20 | class ComposerError(MarkedYAMLError): 21 | pass 22 | 23 | class Composer(object): 24 | 25 | def __init__(self): 26 | self.anchors = {} 27 | 28 | def check_node(self): 29 | # Drop the STREAM-START event. 30 | if self.check_event(StreamStartEvent): 31 | self.get_event() 32 | 33 | # If there are more documents available? 
class Composer(object):
    """Builds the node graph for each document from the parser's events.

    Mixed into the loader stack; `check_event`/`get_event`/`peek_event`
    come from the Parser mixin, and `descend_resolver`/`ascend_resolver`/
    `resolve` from the Resolver mixin.
    """

    def __init__(self):
        # anchor name -> already-composed node, used to resolve aliases.
        self.anchors = {}

    def check_node(self):
        # Skip the STREAM-START event, then report whether another
        # document remains in the stream.
        if self.check_event(StreamStartEvent):
            self.get_event()
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Root node of the next document, or None at end of stream.
        if self.check_event(StreamEndEvent):
            return None
        return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Anything left over means the caller fed a multi-document stream.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError(
                "expected a single document in the stream",
                document.start_mark,
                "but found another document",
                event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()
        return document

    def compose_document(self):
        self.get_event()  # DOCUMENT-START
        node = self.compose_node(None, None)
        self.get_event()  # DOCUMENT-END
        # Anchors are scoped to a single document.
        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        # Aliases resolve to the node already composed for the anchor.
        if self.check_event(AliasEvent):
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(
                    None, None,
                    "found undefined alias %r" % anchor.encode('utf-8'),
                    event.start_mark)
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None and anchor in self.anchors:
            raise ComposerError(
                "found duplicate anchor %r; first occurrence"
                % anchor.encode('utf-8'),
                self.anchors[anchor].start_mark,
                "second occurrence", event.start_mark)
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        # '!' means "use the implicit tag"; resolve it from the value.
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        # Register the anchor before composing children so the sequence
        # may alias itself recursively.
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        node.end_mark = self.get_event().end_mark
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            item_key = self.compose_node(node, None)
            item_value = self.compose_node(node, item_key)
            # Value is a list of pairs; duplicate keys are not rejected here.
            node.value.append((item_key, item_value))
        node.end_mark = self.get_event().end_mark
        return node
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    """Dumper built on the *base* representer/resolver pair.

    Fix: the original called Representer.__init__/Resolver.__init__ even
    though this class inherits from BaseRepresenter/BaseResolver.  That
    only worked because those subclasses inherit the base __init__, and it
    needlessly coupled BaseDumper to the full Representer/Resolver names.
    Behavior is unchanged; the correct bases are now initialized directly.
    """

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        BaseRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        BaseResolver.__init__(self)

class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    """Dumper using SafeRepresenter: emits only basic Python types."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)

class Dumper(Emitter, Serializer, Representer, Resolver):
    """Default dumper: full Representer plus the standard Resolver."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
class Mark(object):
    """A position (name, character index, line, column) in a YAML stream.

    `buffer`/`pointer` are kept only for in-memory input so error messages
    can show a source snippet; stream-backed marks carry None for both.
    """

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        """Return an excerpt of the current line with a '^' caret under the
        mark, or None when no buffer was retained."""
        if self.buffer is None:
            return None
        break_chars = u'\0\r\n\x85\u2028\u2029'
        # Walk left to the start of the line, clipping with ' ... '.
        head = ''
        start = self.pointer
        while start > 0 and self.buffer[start-1] not in break_chars:
            start -= 1
            if self.pointer-start > max_length/2-1:
                head = ' ... '
                start += 5
                break
        # Walk right to the end of the line, clipped symmetrically.
        tail = ''
        end = self.pointer
        while end < len(self.buffer) and self.buffer[end] not in break_chars:
            end += 1
            if end-self.pointer > max_length/2-1:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[start:end].encode('utf-8')
        return (' '*indent + head + snippet + tail + '\n'
                + ' '*(indent+self.pointer-start+len(head)) + '^')

    def __str__(self):
        where = " in \"%s\", line %d, column %d" \
                % (self.name, self.line+1, self.column+1)
        snippet = self.get_snippet()
        if snippet is not None:
            where += ":\n"+snippet
        return where

class YAMLError(Exception):
    """Base class for every error raised by this YAML package."""
    pass

class MarkedYAMLError(YAMLError):
    """YAML error annotated with context/problem marks for rich messages."""

    def __init__(self, context=None, context_mark=None,
            problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        parts = []
        if self.context is not None:
            parts.append(self.context)
        # Show the context mark only when it adds information beyond the
        # problem mark (missing problem info, or a different position).
        if self.context_mark is not None \
                and (self.problem is None or self.problem_mark is None
                    or self.context_mark.name != self.problem_mark.name
                    or self.context_mark.line != self.problem_mark.line
                    or self.context_mark.column != self.problem_mark.column):
            parts.append(str(self.context_mark))
        if self.problem is not None:
            parts.append(self.problem)
        if self.problem_mark is not None:
            parts.append(str(self.problem_mark))
        if self.note is not None:
            parts.append(self.note)
        return '\n'.join(parts)
__all__ = [
    'AliasEvent', 'CollectionEndEvent', 'CollectionStartEvent',
    'DocumentEndEvent', 'DocumentStartEvent', 'Event',
    'MappingEndEvent', 'MappingStartEvent', 'NodeEvent',
    'ScalarEvent', 'SequenceEndEvent', 'SequenceStartEvent',
    'StreamEndEvent', 'StreamStartEvent'
]

# Abstract classes.

class Event(object):
    """Base class for all parser/emitter events."""

    def __init__(self, start_mark=None, end_mark=None):
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # Only show identifying attributes the concrete event carries.
        shown = ['%s=%r' % (key, getattr(self, key))
                 for key in ('anchor', 'tag', 'implicit', 'value')
                 if hasattr(self, key)]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(shown))

class NodeEvent(Event):
    """Event tied to a node, optionally carrying an anchor name."""

    def __init__(self, anchor, start_mark=None, end_mark=None):
        Event.__init__(self, start_mark, end_mark)
        self.anchor = anchor

class CollectionStartEvent(NodeEvent):
    """Start of a sequence or mapping (tag, implicitness, flow style)."""

    def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
            flow_style=None):
        NodeEvent.__init__(self, anchor, start_mark, end_mark)
        self.tag = tag
        self.implicit = implicit
        self.flow_style = flow_style

class CollectionEndEvent(Event):
    pass

# Implementations.

class StreamStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        Event.__init__(self, start_mark, end_mark)
        self.encoding = encoding

class StreamEndEvent(Event):
    pass

class DocumentStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None, version=None, tags=None):
        Event.__init__(self, start_mark, end_mark)
        self.explicit = explicit
        self.version = version
        self.tags = tags

class DocumentEndEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None):
        Event.__init__(self, start_mark, end_mark)
        self.explicit = explicit

class AliasEvent(NodeEvent):
    pass

class ScalarEvent(NodeEvent):
    def __init__(self, anchor, tag, implicit, value,
            start_mark=None, end_mark=None, style=None):
        NodeEvent.__init__(self, anchor, start_mark, end_mark)
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.style = style

class SequenceStartEvent(CollectionStartEvent):
    pass

class SequenceEndEvent(CollectionEndEvent):
    pass

class MappingStartEvent(CollectionStartEvent):
    pass

class MappingEndEvent(CollectionEndEvent):
    pass
['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader'] 15 | 16 | from shared.data.yaml.reader import * 17 | from shared.data.yaml.scanner import * 18 | from shared.data.yaml.parser import * 19 | from shared.data.yaml.composer import * 20 | from shared.data.yaml.constructor import * 21 | from shared.data.yaml.resolver import * 22 | 23 | class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): 24 | 25 | def __init__(self, stream): 26 | Reader.__init__(self, stream) 27 | Scanner.__init__(self) 28 | Parser.__init__(self) 29 | Composer.__init__(self) 30 | BaseConstructor.__init__(self) 31 | BaseResolver.__init__(self) 32 | 33 | class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): 34 | 35 | def __init__(self, stream): 36 | Reader.__init__(self, stream) 37 | Scanner.__init__(self) 38 | Parser.__init__(self) 39 | Composer.__init__(self) 40 | FullConstructor.__init__(self) 41 | Resolver.__init__(self) 42 | 43 | class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): 44 | 45 | def __init__(self, stream): 46 | Reader.__init__(self, stream) 47 | Scanner.__init__(self) 48 | Parser.__init__(self) 49 | Composer.__init__(self) 50 | SafeConstructor.__init__(self) 51 | Resolver.__init__(self) 52 | 53 | class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): 54 | 55 | def __init__(self, stream): 56 | Reader.__init__(self, stream) 57 | Scanner.__init__(self) 58 | Parser.__init__(self) 59 | Composer.__init__(self) 60 | Constructor.__init__(self) 61 | Resolver.__init__(self) 62 | 63 | # UnsafeLoader is the same as Loader (which is and was always unsafe on 64 | # untrusted input). Use of either Loader or UnsafeLoader should be rare, since 65 | # FullLoad should be able to load almost all YAML safely. Loader is left intact 66 | # to ensure backwards compatibility. 
class Node(object):
    """A vertex of the representation graph: a tag plus a value, with
    optional source marks for error reporting."""

    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        return '%s(tag=%r, value=%s)' % (
            self.__class__.__name__, self.tag, repr(self.value))

class ScalarNode(Node):
    # `id` lets callers distinguish node kinds without isinstance checks.
    id = 'scalar'

    def __init__(self, tag, value,
            start_mark=None, end_mark=None, style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style

class CollectionNode(Node):
    """Common base for sequence/mapping nodes; adds flow_style."""

    def __init__(self, tag, value,
            start_mark=None, end_mark=None, flow_style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.flow_style = flow_style

class SequenceNode(CollectionNode):
    id = 'sequence'

class MappingNode(CollectionNode):
    id = 'mapping'
25 | # Reader provides the following methods and attributes: 26 | # reader.peek(length=1) - return the next `length` characters 27 | # reader.forward(length=1) - move the current position to `length` characters. 28 | # reader.index - the number of the current character. 29 | # reader.line, stream.column - the line and the column of the current character. 30 | 31 | __all__ = ['Reader', 'ReaderError'] 32 | 33 | from shared.data.yaml.error import YAMLError, Mark 34 | 35 | import codecs, re, sys 36 | 37 | has_ucs4 = sys.maxunicode > 0xffff 38 | 39 | class ReaderError(YAMLError): 40 | 41 | def __init__(self, name, position, character, encoding, reason): 42 | self.name = name 43 | self.character = character 44 | self.position = position 45 | self.encoding = encoding 46 | self.reason = reason 47 | 48 | def __str__(self): 49 | if isinstance(self.character, str): 50 | return "'%s' codec can't decode byte #x%02x: %s\n" \ 51 | " in \"%s\", position %d" \ 52 | % (self.encoding, ord(self.character), self.reason, 53 | self.name, self.position) 54 | else: 55 | return "unacceptable character #x%04x: %s\n" \ 56 | " in \"%s\", position %d" \ 57 | % (self.character, self.reason, 58 | self.name, self.position) 59 | 60 | class Reader(object): 61 | # Reader: 62 | # - determines the data encoding and converts it to unicode, 63 | # - checks if characters are in allowed range, 64 | # - adds '\0' to the end. 65 | 66 | # Reader accepts 67 | # - a `str` object, 68 | # - a `unicode` object, 69 | # - a file-like object with its `read` method returning `str`, 70 | # - a file-like object with its `read` method returning `unicode`. 71 | 72 | # Yeah, it's ugly and slow. 
class Reader(object):
    """Decode an input stream to unicode and serve it character-wise.

    Accepts a unicode object, a byte str, or a file-like object whose
    read() returns either.  Determines the encoding (UTF-16 via BOM,
    otherwise UTF-8), checks characters are YAML-printable, and appends
    u'\\0' as an end-of-stream sentinel.  Python 2 semantics throughout.
    """

    def __init__(self, stream):
        self.name = None
        self.stream = None
        self.stream_pointer = 0
        self.eof = True
        self.buffer = u''
        self.pointer = 0
        self.raw_buffer = None
        self.raw_decode = None
        self.encoding = None
        self.index = 0
        self.line = 0
        self.column = 0
        if isinstance(stream, unicode):
            # Already decoded: just validate and add the sentinel.
            # NOTE(review): name is "" here; upstream PyYAML uses
            # "<unicode string>" -- confirm against the deployed file.
            self.name = ""
            self.check_printable(stream)
            self.buffer = stream+u'\0'
        elif isinstance(stream, str):
            self.name = ""
            self.raw_buffer = stream
            self.determine_encoding()
        else:
            self.stream = stream
            self.name = getattr(stream, 'name', "")
            self.eof = False
            self.raw_buffer = ''
            self.determine_encoding()

    def peek(self, index=0):
        # EAFP: only pull more data when the buffer runs short.
        try:
            return self.buffer[self.pointer+index]
        except IndexError:
            self.update(index+1)
            return self.buffer[self.pointer+index]

    def prefix(self, length=1):
        if self.pointer+length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer:self.pointer+length]

    def forward(self, length=1):
        # +1 so the CR/LF lookahead below never runs off the buffer.
        if self.pointer+length+1 >= len(self.buffer):
            self.update(length+1)
        while length:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch in u'\n\x85\u2028\u2029' \
                    or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
                # Line break (a CR in a CRLF pair is counted once, at the LF).
                self.line += 1
                self.column = 0
            elif ch != u'\uFEFF':
                # BOM characters do not advance the column.
                self.column += 1
            length -= 1

    def get_mark(self):
        # Keep the buffer for snippets only when reading from memory.
        if self.stream is None:
            return Mark(self.name, self.index, self.line, self.column,
                    self.buffer, self.pointer)
        return Mark(self.name, self.index, self.line, self.column,
                None, None)

    def determine_encoding(self):
        # Need two bytes to recognize a UTF-16 BOM.
        while not self.eof and len(self.raw_buffer) < 2:
            self.update_raw()
        if not isinstance(self.raw_buffer, unicode):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode
                self.encoding = 'utf-8'
        self.update(1)

    if has_ucs4:
        NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
    else:
        # Narrow unicode build: additionally reject unpaired surrogates.
        NON_PRINTABLE = re.compile(u"[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uFFFD]"+"|"+"(?:^|[^\uD800-\uDBFF])[\uDC00-\uDFFF]|[\uD800-\uDBFF](?:[^\uDC00-\uDFFF]|$)")

    def check_printable(self, data):
        match = self.NON_PRINTABLE.search(data)
        if match:
            character = match.group()
            position = self.index+(len(self.buffer)-self.pointer)+match.start()
            raise ReaderError(self.name, position, ord(character),
                    'unicode', "special characters are not allowed")

    def update(self, length):
        """Ensure at least `length` decoded characters past the pointer."""
        if self.raw_buffer is None:
            return
        # Drop consumed characters to keep the buffer bounded.
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer,
                            'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = exc.object[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer-len(self.raw_buffer)+exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character,
                            exc.encoding, exc.reason)
            else:
                # Input was already unicode; pass it through unchanged.
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += u'\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=1024):
        data = self.stream.read(size)
        if data:
            self.raw_buffer += data
            self.stream_pointer += len(data)
        else:
            self.eof = True
class ResolverError(YAMLError):
    pass

class BaseResolver(object):
    """Maps nodes to tags via implicit (regex) and path resolvers.

    The registries are class-level and copy-on-write: a subclass gets its
    own copy the first time a resolver is registered on it.
    """

    DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'

    # first-character -> [(tag, regexp), ...]; the None key holds
    # wildcard resolvers tried for every value.
    yaml_implicit_resolvers = {}
    yaml_path_resolvers = {}

    def __init__(self):
        self.resolver_exact_paths = []
        self.resolver_prefix_paths = []

    def add_implicit_resolver(cls, tag, regexp, first):
        """Register `regexp` -> `tag` for scalars starting with a char in
        `first` (None registers a wildcard resolver)."""
        # Copy-on-write so registrations don't leak into sibling classes.
        if 'yaml_implicit_resolvers' not in cls.__dict__:
            implicit_resolvers = {}
            for key in cls.yaml_implicit_resolvers:
                implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
            cls.yaml_implicit_resolvers = implicit_resolvers
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
    add_implicit_resolver = classmethod(add_implicit_resolver)

    def add_path_resolver(cls, tag, path, kind=None):
        """Register `tag` for nodes reached through `path`.

        Note: experimental API.  Each `path` element normalizes to a
        (node_check, index_check) pair; node_check is a node class, a tag
        string, or None (any kind); index_check is None/False (any value),
        True (any mapping key), a string (value under that scalar key), or
        an int (sequence index).
        """
        if 'yaml_path_resolvers' not in cls.__dict__:
            cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
        new_path = []
        for element in path:
            if isinstance(element, (list, tuple)):
                if len(element) == 2:
                    node_check, index_check = element
                elif len(element) == 1:
                    node_check = element[0]
                    index_check = True
                else:
                    raise ResolverError("Invalid path element: %s" % element)
            else:
                node_check = None
                index_check = element
            # Accept the builtin types as shorthand for the node classes.
            if node_check is str:
                node_check = ScalarNode
            elif node_check is list:
                node_check = SequenceNode
            elif node_check is dict:
                node_check = MappingNode
            elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
                    and not isinstance(node_check, basestring) \
                    and node_check is not None:
                raise ResolverError("Invalid node checker: %s" % node_check)
            if not isinstance(index_check, (basestring, int)) \
                    and index_check is not None:
                raise ResolverError("Invalid index checker: %s" % index_check)
            new_path.append((node_check, index_check))
        if kind is str:
            kind = ScalarNode
        elif kind is list:
            kind = SequenceNode
        elif kind is dict:
            kind = MappingNode
        elif kind not in [ScalarNode, SequenceNode, MappingNode] \
                and kind is not None:
            raise ResolverError("Invalid node kind: %s" % kind)
        cls.yaml_path_resolvers[tuple(new_path), kind] = tag
    add_path_resolver = classmethod(add_path_resolver)

    def descend_resolver(self, current_node, current_index):
        """Push the path-resolver state for one level of the node tree."""
        if not self.yaml_path_resolvers:
            return
        exact_paths = {}
        prefix_paths = []
        if current_node:
            depth = len(self.resolver_prefix_paths)
            for path, kind in self.resolver_prefix_paths[-1]:
                if self.check_resolver_prefix(depth, path, kind,
                        current_node, current_index):
                    if len(path) > depth:
                        prefix_paths.append((path, kind))
                    else:
                        exact_paths[kind] = self.yaml_path_resolvers[path, kind]
        else:
            for path, kind in self.yaml_path_resolvers:
                if not path:
                    exact_paths[kind] = self.yaml_path_resolvers[path, kind]
                else:
                    prefix_paths.append((path, kind))
        self.resolver_exact_paths.append(exact_paths)
        self.resolver_prefix_paths.append(prefix_paths)

    def ascend_resolver(self):
        """Pop the path-resolver state pushed by descend_resolver."""
        if not self.yaml_path_resolvers:
            return
        self.resolver_exact_paths.pop()
        self.resolver_prefix_paths.pop()

    def check_resolver_prefix(self, depth, path, kind,
            current_node, current_index):
        """Return True when path[depth-1] matches the current node/index."""
        node_check, index_check = path[depth-1]
        if isinstance(node_check, basestring):
            if current_node.tag != node_check:
                return
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return
        if index_check is True and current_index is not None:
            return
        if (index_check is False or index_check is None) \
                and current_index is None:
            return
        if isinstance(index_check, basestring):
            if not (isinstance(current_index, ScalarNode)
                    and index_check == current_index.value):
                return
        elif isinstance(index_check, int) and not isinstance(index_check, bool):
            if index_check != current_index:
                return
        return True

    def resolve(self, kind, value, implicit):
        """Resolve the tag for a node of `kind` with scalar `value`."""
        if kind is ScalarNode and implicit[0]:
            if value == u'':
                resolvers = self.yaml_implicit_resolvers.get(u'', [])
            else:
                resolvers = self.yaml_implicit_resolvers.get(value[0], [])
            # Fix: the original did `resolvers += ...get(None, [])`, which
            # mutated the list stored in the class registry, so wildcard
            # resolvers accumulated into the per-character lists on every
            # call.  Concatenate instead (matches the upstream PyYAML fix).
            wildcard_resolvers = self.yaml_implicit_resolvers.get(None, [])
            for tag, regexp in resolvers + wildcard_resolvers:
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
            if None in exact_paths:
                return exact_paths[None]
        if kind is ScalarNode:
            return self.DEFAULT_SCALAR_TAG
        elif kind is SequenceNode:
            return self.DEFAULT_SEQUENCE_TAG
        elif kind is MappingNode:
            return self.DEFAULT_MAPPING_TAG
isinstance(index_check, int) and not isinstance(index_check, bool): 151 | if index_check != current_index: 152 | return 153 | return True 154 | 155 | def resolve(self, kind, value, implicit): 156 | if kind is ScalarNode and implicit[0]: 157 | if value == u'': 158 | resolvers = self.yaml_implicit_resolvers.get(u'', []) 159 | else: 160 | resolvers = self.yaml_implicit_resolvers.get(value[0], []) 161 | resolvers += self.yaml_implicit_resolvers.get(None, []) 162 | for tag, regexp in resolvers: 163 | if regexp.match(value): 164 | return tag 165 | implicit = implicit[1] 166 | if self.yaml_path_resolvers: 167 | exact_paths = self.resolver_exact_paths[-1] 168 | if kind in exact_paths: 169 | return exact_paths[kind] 170 | if None in exact_paths: 171 | return exact_paths[None] 172 | if kind is ScalarNode: 173 | return self.DEFAULT_SCALAR_TAG 174 | elif kind is SequenceNode: 175 | return self.DEFAULT_SEQUENCE_TAG 176 | elif kind is MappingNode: 177 | return self.DEFAULT_MAPPING_TAG 178 | 179 | class Resolver(BaseResolver): 180 | pass 181 | 182 | Resolver.add_implicit_resolver( 183 | u'tag:yaml.org,2002:bool', 184 | re.compile(ur'''^(?:yes|Yes|YES|no|No|NO 185 | |true|True|TRUE|false|False|FALSE 186 | |on|On|ON|off|Off|OFF)$''', re.X), 187 | list(u'yYnNtTfFoO')) 188 | 189 | Resolver.add_implicit_resolver( 190 | u'tag:yaml.org,2002:float', 191 | re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? 192 | |\.[0-9_]+(?:[eE][-+][0-9]+)? 
193 | |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* 194 | |[-+]?\.(?:inf|Inf|INF) 195 | |\.(?:nan|NaN|NAN))$''', re.X), 196 | list(u'-+0123456789.')) 197 | 198 | Resolver.add_implicit_resolver( 199 | u'tag:yaml.org,2002:int', 200 | re.compile(ur'''^(?:[-+]?0b[0-1_]+ 201 | |[-+]?0[0-7_]+ 202 | |[-+]?(?:0|[1-9][0-9_]*) 203 | |[-+]?0x[0-9a-fA-F_]+ 204 | |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), 205 | list(u'-+0123456789')) 206 | 207 | Resolver.add_implicit_resolver( 208 | u'tag:yaml.org,2002:merge', 209 | re.compile(ur'^(?:<<)$'), 210 | [u'<']) 211 | 212 | Resolver.add_implicit_resolver( 213 | u'tag:yaml.org,2002:null', 214 | re.compile(ur'''^(?: ~ 215 | |null|Null|NULL 216 | | )$''', re.X), 217 | [u'~', u'n', u'N', u'']) 218 | 219 | Resolver.add_implicit_resolver( 220 | u'tag:yaml.org,2002:timestamp', 221 | re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] 222 | |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? 223 | (?:[Tt]|[ \t]+)[0-9][0-9]? 224 | :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? 225 | (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), 226 | list(u'0123456789')) 227 | 228 | Resolver.add_implicit_resolver( 229 | u'tag:yaml.org,2002:value', 230 | re.compile(ur'^(?:=)$'), 231 | [u'=']) 232 | 233 | # The following resolver is only for documentation purposes. It cannot work 234 | # because plain scalars cannot start with '!', '&', or '*'. 
235 | Resolver.add_implicit_resolver( 236 | u'tag:yaml.org,2002:yaml', 237 | re.compile(ur'^(?:!|&|\*)$'), 238 | list(u'!&*')) 239 | -------------------------------------------------------------------------------- /shared/data/yaml/serializer.py: -------------------------------------------------------------------------------- 1 | # PyYAML library 2 | __license__ = 'MIT' 3 | __author__ = 'Kirill Simonov' 4 | __copyright__ = """ 5 | Copyright (c) 2017-2020 Ingy döt Net 6 | Copyright (c) 2006-2016 Kirill Simonov 7 | """ 8 | 9 | # For changes regarding this port for Ignition usage, please contact: 10 | __maintainer__ = 'Andrew Geiger' 11 | __email__ = 'andrew.geiger@corsosystems.com' 12 | 13 | 14 | __all__ = ['Serializer', 'SerializerError'] 15 | 16 | from shared.data.yaml.error import YAMLError 17 | from shared.data.yaml.events import * 18 | from shared.data.yaml.nodes import * 19 | 20 | class SerializerError(YAMLError): 21 | pass 22 | 23 | class Serializer(object): 24 | 25 | ANCHOR_TEMPLATE = u'id%03d' 26 | 27 | def __init__(self, encoding=None, 28 | explicit_start=None, explicit_end=None, version=None, tags=None): 29 | self.use_encoding = encoding 30 | self.use_explicit_start = explicit_start 31 | self.use_explicit_end = explicit_end 32 | self.use_version = version 33 | self.use_tags = tags 34 | self.serialized_nodes = {} 35 | self.anchors = {} 36 | self.last_anchor_id = 0 37 | self.closed = None 38 | 39 | def open(self): 40 | if self.closed is None: 41 | self.emit(StreamStartEvent(encoding=self.use_encoding)) 42 | self.closed = False 43 | elif self.closed: 44 | raise SerializerError("serializer is closed") 45 | else: 46 | raise SerializerError("serializer is already opened") 47 | 48 | def close(self): 49 | if self.closed is None: 50 | raise SerializerError("serializer is not opened") 51 | elif not self.closed: 52 | self.emit(StreamEndEvent()) 53 | self.closed = True 54 | 55 | #def __del__(self): 56 | # self.close() 57 | 58 | def serialize(self, node): 59 | if 
self.closed is None: 60 | raise SerializerError("serializer is not opened") 61 | elif self.closed: 62 | raise SerializerError("serializer is closed") 63 | self.emit(DocumentStartEvent(explicit=self.use_explicit_start, 64 | version=self.use_version, tags=self.use_tags)) 65 | self.anchor_node(node) 66 | self.serialize_node(node, None, None) 67 | self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) 68 | self.serialized_nodes = {} 69 | self.anchors = {} 70 | self.last_anchor_id = 0 71 | 72 | def anchor_node(self, node): 73 | if node in self.anchors: 74 | if self.anchors[node] is None: 75 | self.anchors[node] = self.generate_anchor(node) 76 | else: 77 | self.anchors[node] = None 78 | if isinstance(node, SequenceNode): 79 | for item in node.value: 80 | self.anchor_node(item) 81 | elif isinstance(node, MappingNode): 82 | for key, value in node.value: 83 | self.anchor_node(key) 84 | self.anchor_node(value) 85 | 86 | def generate_anchor(self, node): 87 | self.last_anchor_id += 1 88 | return self.ANCHOR_TEMPLATE % self.last_anchor_id 89 | 90 | def serialize_node(self, node, parent, index): 91 | alias = self.anchors[node] 92 | if node in self.serialized_nodes: 93 | self.emit(AliasEvent(alias)) 94 | else: 95 | self.serialized_nodes[node] = True 96 | self.descend_resolver(parent, index) 97 | if isinstance(node, ScalarNode): 98 | detected_tag = self.resolve(ScalarNode, node.value, (True, False)) 99 | default_tag = self.resolve(ScalarNode, node.value, (False, True)) 100 | implicit = (node.tag == detected_tag), (node.tag == default_tag) 101 | self.emit(ScalarEvent(alias, node.tag, implicit, node.value, 102 | style=node.style)) 103 | elif isinstance(node, SequenceNode): 104 | implicit = (node.tag 105 | == self.resolve(SequenceNode, node.value, True)) 106 | self.emit(SequenceStartEvent(alias, node.tag, implicit, 107 | flow_style=node.flow_style)) 108 | index = 0 109 | for item in node.value: 110 | self.serialize_node(item, node, index) 111 | index += 1 112 | 
self.emit(SequenceEndEvent()) 113 | elif isinstance(node, MappingNode): 114 | implicit = (node.tag 115 | == self.resolve(MappingNode, node.value, True)) 116 | self.emit(MappingStartEvent(alias, node.tag, implicit, 117 | flow_style=node.flow_style)) 118 | for key, value in node.value: 119 | self.serialize_node(key, node, None) 120 | self.serialize_node(value, node, key) 121 | self.emit(MappingEndEvent()) 122 | self.ascend_resolver() 123 | -------------------------------------------------------------------------------- /shared/data/yaml/tokens.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | __all__ = [ 4 | 'AliasToken', 'AnchorToken', 'BlockEndToken', 5 | 'BlockEntryToken', 'BlockMappingStartToken', 6 | 'BlockSequenceStartToken', 'DirectiveToken', 7 | 'DocumentEndToken', 'DocumentStartToken', 'FlowEntryToken', 8 | 'FlowMappingEndToken', 'FlowMappingStartToken', 9 | 'FlowSequenceEndToken', 'FlowSequenceStartToken', 'KeyToken', 10 | 'ScalarToken', 'StreamEndToken', 'StreamStartToken', 11 | 'TagToken', 'Token', 'ValueToken' 12 | ] 13 | 14 | 15 | class Token(object): 16 | def __init__(self, start_mark, end_mark): 17 | self.start_mark = start_mark 18 | self.end_mark = end_mark 19 | def __repr__(self): 20 | attributes = [key for key in self.__dict__ 21 | if not key.endswith('_mark')] 22 | attributes.sort() 23 | arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) 24 | for key in attributes]) 25 | return '%s(%s)' % (self.__class__.__name__, arguments) 26 | 27 | #class BOMToken(Token): 28 | # id = '' 29 | 30 | class DirectiveToken(Token): 31 | id = '' 32 | def __init__(self, name, value, start_mark, end_mark): 33 | self.name = name 34 | self.value = value 35 | self.start_mark = start_mark 36 | self.end_mark = end_mark 37 | 38 | class DocumentStartToken(Token): 39 | id = '' 40 | 41 | class DocumentEndToken(Token): 42 | id = '' 43 | 44 | class StreamStartToken(Token): 45 | id = '' 46 | def __init__(self, start_mark=None, 
end_mark=None, 47 | encoding=None): 48 | self.start_mark = start_mark 49 | self.end_mark = end_mark 50 | self.encoding = encoding 51 | 52 | class StreamEndToken(Token): 53 | id = '' 54 | 55 | class BlockSequenceStartToken(Token): 56 | id = '' 57 | 58 | class BlockMappingStartToken(Token): 59 | id = '' 60 | 61 | class BlockEndToken(Token): 62 | id = '' 63 | 64 | class FlowSequenceStartToken(Token): 65 | id = '[' 66 | 67 | class FlowMappingStartToken(Token): 68 | id = '{' 69 | 70 | class FlowSequenceEndToken(Token): 71 | id = ']' 72 | 73 | class FlowMappingEndToken(Token): 74 | id = '}' 75 | 76 | class KeyToken(Token): 77 | id = '?' 78 | 79 | class ValueToken(Token): 80 | id = ':' 81 | 82 | class BlockEntryToken(Token): 83 | id = '-' 84 | 85 | class FlowEntryToken(Token): 86 | id = ',' 87 | 88 | class AliasToken(Token): 89 | id = '' 90 | def __init__(self, value, start_mark, end_mark): 91 | self.value = value 92 | self.start_mark = start_mark 93 | self.end_mark = end_mark 94 | 95 | class AnchorToken(Token): 96 | id = '' 97 | def __init__(self, value, start_mark, end_mark): 98 | self.value = value 99 | self.start_mark = start_mark 100 | self.end_mark = end_mark 101 | 102 | class TagToken(Token): 103 | id = '' 104 | def __init__(self, value, start_mark, end_mark): 105 | self.value = value 106 | self.start_mark = start_mark 107 | self.end_mark = end_mark 108 | 109 | class ScalarToken(Token): 110 | id = '' 111 | def __init__(self, value, plain, start_mark, end_mark, style=None): 112 | self.value = value 113 | self.plain = plain 114 | self.start_mark = start_mark 115 | self.end_mark = end_mark 116 | self.style = style 117 | -------------------------------------------------------------------------------- /shared/tools/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Metatools and utilities to make Ignition interesting. 
3 | """ 4 | 5 | 6 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 7 | __license__ = 'Apache 2.0' 8 | __maintainer__ = 'Andrew Geiger' 9 | __email__ = 'andrew.geiger@corsosystems.com' -------------------------------------------------------------------------------- /shared/tools/data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper functions for interacting with data a bit easier. 3 | """ 4 | 5 | 6 | from com.inductiveautomation.ignition.common import BasicDataset 7 | from itertools import izip as zip 8 | import re, fnmatch, random, string 9 | 10 | 11 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 12 | __license__ = 'Apache 2.0' 13 | __maintainer__ = 'Andrew Geiger' 14 | __email__ = 'andrew.geiger@corsosystems.com' 15 | 16 | 17 | def chunks(l, n): 18 | """https://stackoverflow.com/a/1751478""" 19 | n = max(1, n) 20 | return (l[i:i+n] for i in xrange(0, len(l), n)) 21 | 22 | 23 | def unchunk(listOfLists): 24 | linList = [] 25 | numBuckets = len(listOfLists) 26 | maxDepth = max(len(subList) for subList in listOfLists) 27 | for i in range(maxDepth): 28 | for subList in listOfLists: 29 | try: 30 | linList.append(subList[i]) 31 | except IndexError: 32 | continue 33 | return linList 34 | 35 | 36 | def randomId(numLetters=10): 37 | wo = ''.join(random.choice(string.ascii_uppercase + string.digits) 38 | for i in range(numLetters)) 39 | hyphen = random.randint(1, numLetters - 1) 40 | return '%s-%s' % (wo[:hyphen], wo[hyphen:]) 41 | 42 | 43 | def datasetToListDict(dataset): 44 | """Converts a dataset into a list of dictionaries. 45 | Convenient to treat data on a row-by-row basis naturally in Python. 
46 | 47 | >>> from shared.tools.examples import simpleDataset 48 | >>> datasetToListDict(simpleDataset) 49 | [{'a': 1, 'b': 2, 'c': 3}, {'a': 4, 'b': 5, 'c': 6}, {'a': 7, 'b': 8, 'c': 9}] 50 | """ 51 | header = [str(name) for name in dataset.getColumnNames()] 52 | try: 53 | return [dict(zip(header, row)) for row in zip(*dataset.data)] 54 | except: 55 | try: 56 | possibleGetters = ( 57 | 'getDelegateDataset', # Database 58 | 'toIgnitionDataset', # Sepasoft OEE 59 | ) 60 | for attr in possibleGetters: 61 | getter = getattr(dataset, attr, None) 62 | if getter: 63 | resolvedDataset = getter() 64 | break 65 | else: 66 | raise RuntimeError("Could not resolve dataset type.") 67 | 68 | return [dict(zip(header, row)) for row in zip(*resolvedDataset.data)] 69 | except: 70 | return [dict( (columnName, dataset.getValueAt(rix,columnName)) 71 | for columnName in header + [sdfs]) 72 | for rix in range(dataset.getRowCount()) ] 73 | 74 | 75 | def datasetToDictList(dataset): 76 | """Converts a dataset into a dictionary of column lists. 77 | Convenient for treating data on a specific-column basis. 78 | 79 | >>> from shared.tools.examples import simpleDataset 80 | >>> datasetToDictList(simpleDataset) 81 | {'a': [1, 4, 7], 'b': [2, 5, 8], 'c': [3, 6, 9]} 82 | """ 83 | header = [str(name) for name in dataset.getColumnNames()] 84 | return dict(zip( header, [dataset.getColumnAsList(i) for i in range(len(header))] )) 85 | 86 | 87 | def gatherKeys(data): 88 | """Gather all the possible keys in a list of dicts. 89 | (Note that voids in a particular row aren't too bad.) 90 | 91 | >>> from shared.tools.examples import complexListDict 92 | >>> gatherKeys(complexListDict) 93 | ['date', 'double', 'int', 'string'] 94 | """ 95 | keys = set() 96 | for row in data: 97 | keys.update(row) 98 | return sorted(list(keys)) 99 | 100 | 101 | def listDictToDataset(data, keys=None): 102 | """Converts a list of dictionaries into a dataset. 
103 | A selection of keys can be requested (and reordered), where missing entries 104 | are filled with None values. 105 | 106 | >>> from shared.tools.pretty import p 107 | >>> from shared.tools.examples import simpleListDict 108 | >>> ld2ds = listDictToDataset(simpleListDict, keys=['c','b']) 109 | >>> p(ld2ds) 110 | "ld2ds" of 3 elements and 2 columns 111 | ============================================= 112 | c | b 113 | | 114 | -------------------------------------------------------- 115 | 0 | 3 | 2 116 | 1 | 6 | 5 117 | 2 | 9 | 8 118 | """ 119 | # gather the keys, in case there are voids in the data 120 | if not keys: 121 | keys = gatherKeys(data) 122 | 123 | columns = dict((key,[]) for key in keys) 124 | for row in data: 125 | for key in keys: 126 | columns[key].append( row.get(key, None) ) 127 | 128 | aligned = zip(*[columns[key] for key in keys]) 129 | 130 | return system.dataset.toDataSet(keys, aligned) 131 | 132 | 133 | def datasetColumnToList(dataset, columnName): 134 | """Get the entire column as a list.""" 135 | # optimized depending on dataset size 136 | if dataset.getRowCount() < 100: 137 | vals = [] 138 | for row in range(dataset.getRowCount()): 139 | val = dataset.getValueAt(row, columnName) 140 | vals.append(val) 141 | return vals 142 | else: 143 | cix = dataset.getColumnIndex(columnName) 144 | # convert to a proper python list 145 | return list(v for v in dataset.getColumnAsList(cix)) 146 | 147 | 148 | def filterDatasetWildcard(dataset, filters): 149 | """ 150 | Overview: 151 | Takes a dataset and returns a new dataset containing only rows that satisfy the filters 152 | Allows the use of a wildcard (*) and single character wildcards (?) 
153 | Arguments: 154 | dataset - The original dataset to operate on 155 | filters - A dictionary where keys are column names, and values are the glob patterns 156 | that are checked for equivalency in the column specified by the key 157 | """ 158 | rowsToDelete = [] 159 | 160 | filtersIx = dict((dataset.getColumnIndex(columnName),pattern) 161 | for columnName, pattern 162 | in filters.items()) 163 | 164 | for rix in range(dataset.getRowCount()): 165 | for cix, pattern in filtersIx.items(): 166 | 167 | entry = dataset.getValueAt(rix, cix) 168 | 169 | # check each entry, removing the row on failed matches 170 | if not fnmatch.fnmatch(entry, pattern): 171 | rowsToDelete.append(rix) 172 | break 173 | 174 | return system.dataset.deleteRows(dataset, rowsToDelete) 175 | -------------------------------------------------------------------------------- /shared/tools/debug/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Debugger for Ignition Python scripts. 3 | """ 4 | 5 | 6 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 7 | __license__ = 'Apache 2.0' 8 | __maintainer__ = 'Andrew Geiger' 9 | __email__ = 'andrew.geiger@corsosystems.com' -------------------------------------------------------------------------------- /shared/tools/debug/_test.py: -------------------------------------------------------------------------------- 1 | """ 2 | A simple test script to exercise the tracer against. 3 | 4 | This launches a thread that is immediately interdicted by a tracer. 5 | The thread has just enough features to exercise and see the different 6 | events and ways to control/watch/manipulate program flow. 7 | 8 | A tracer reference is going to be added to the tag folder 9 | `[default]_Tracers/` and will accept commands from there. 10 | Otherwise get the tracer from ExtraGlobal and interact with the thread! 11 | 12 | As a starting hint, use tracer << 'help' to get more info on available commands. 
13 | If the tracer is running on the gateway or on a remote client, 14 | the tag command line is the only way to control it. 15 | 16 | 17 | Test script (run from interactive console for best results!) 18 | 19 | >>> debug_thread = shared.tools.debug._test.launch_target_thread() 20 | >>> from shared.tools.debug.tracer import Tracer 21 | >>> shared.tools.pretty.install() 22 | >>> Tracer.tracers 23 | >>> tracer = Tracer.tracers[0] 24 | >>> tracer << 'help' 25 | >>> tracer.current_context 26 | """ 27 | 28 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 29 | __license__ = 'Apache 2.0' 30 | __maintainer__ = 'Andrew Geiger' 31 | __email__ = 'andrew.geiger@corsosystems.com' 32 | 33 | 34 | 35 | from shared.tools.thread import async, dangerouslyKillThreads 36 | from shared.tools.debug.tracer import set_trace 37 | from time import sleep 38 | 39 | 40 | RUNNING_THREAD_NAME = 'debug_test' 41 | 42 | TAG_CONTROL_FOLDER = '[default]_Tracers/' 43 | 44 | 45 | def launch_target_thread(test_thread_name=RUNNING_THREAD_NAME, tag_control_folder=TAG_CONTROL_FOLDER): 46 | 47 | dangerouslyKillThreads(test_thread_name, bypass_interlock='Yes, seriously.') 48 | 49 | @async(name=test_thread_name) 50 | def monitored(_tcf=tag_control_folder): 51 | close_loop = False 52 | 53 | set_trace(control_tag=_tcf) 54 | 55 | time_delay = 0.5 56 | find_me = 0 57 | 58 | some_dict = {"j": 43.21} 59 | 60 | def bar(x, steps=5): 61 | 62 | for y in range(steps): 63 | x += 1 64 | sleep(0.05) 65 | 66 | y = x * 2 67 | 68 | return x 69 | 70 | while True: 71 | find_me = bar(find_me, steps=2) 72 | 73 | print 'find_me: ', find_me 74 | sleep(time_delay) 75 | 76 | if close_loop: 77 | break 78 | 79 | try: 80 | if throw_error: 81 | x = 1/0 82 | except NameError: 83 | pass 84 | 85 | print 'Finished' 86 | 87 | return monitored() 88 | 89 | #target_thread = launch_target_thread() 90 | -------------------------------------------------------------------------------- /shared/tools/debug/_test_scenario.py: 
-------------------------------------------------------------------------------- 1 | #shared.tools.pretty.install() 2 | #from time import sleep 3 | #from shared.insitu import launch_target_thread 4 | #target_thread = launch_target_thread() 5 | #sleep(0.5) 6 | # 7 | #from shared.tools.debug.tracer import Tracer 8 | #tracer = Tracer.tracers[0] 9 | #tracer 10 | 11 | # print "Pausing before scenario..." 12 | # sleep(2.0) 13 | # print "initializing scenario:" 14 | from shared.tools.data import randomId 15 | from shared.tools.thread import async 16 | from shared.tools.debug.codecache import CodeCache 17 | import textwrap 18 | # So: build a new thread bootstrapped with the given code 19 | 20 | def fork_scenario(frame, backref='', sys_context=None): 21 | 22 | if not backref: 23 | backref = '<%s>' % randomId(6) 24 | 25 | source = CodeCache.get_lines(frame, radius=0, sys_context=sys_context) 26 | 27 | # frame lines are one-indexed 28 | frame_first_line_number = frame.f_code.co_firstlineno 29 | frame_first_line = source[frame_first_line_number - 1] 30 | 31 | spacer = frame_first_line[0] 32 | 33 | for indent_count, c in enumerate(frame_first_line): 34 | # zero-index means we end on the count 35 | if c != spacer: 36 | break 37 | 38 | # increase the indent by one since the frame is actually executed 39 | # inside the definition, not _on_ it. 
40 | indent = spacer * (indent_count + 1) 41 | code_block = [] 42 | for line in source[frame_first_line_number:]: 43 | # add any lines that are the expected indent 44 | if line.startswith(indent): 45 | code_block.append(line) 46 | # once we're past the def statement, break if we dedent 47 | elif code_block: 48 | break 49 | 50 | while not code_block[-1].strip(): 51 | _ = code_block.pop(-1) 52 | 53 | head_code = [indent + line for line in """ 54 | from shared.tools.debug.tracer import set_trace 55 | set_trace() 56 | """.splitlines() if line] 57 | 58 | code_block = head_code + code_block 59 | 60 | #CodeCache._render_tabstops(code_block) 61 | code = compile(textwrap.dedent('\n'.join(code_block)), '' % backref, 'exec') 62 | 63 | argument_names = frame.f_code.co_varnames[:frame.f_code.co_argcount] 64 | 65 | scenario_locals = dict((arg_name, frame.f_locals[arg_name]) 66 | for arg_name in argument_names) 67 | 68 | scenario_globals = frame.f_globals.copy() 69 | #del scenario_globals['Tracer'] 70 | 71 | @async(name='Tracer Scenario - %s' % backref) 72 | def initialize_scenario(code=code, 73 | scenario_globals=scenario_globals, 74 | scenario_locals=scenario_locals): 75 | exec(code, scenario_globals, scenario_locals) 76 | 77 | return initialize_scenario() 78 | 79 | scenario_thread = fork_scenario(tracer.cursor_frame, tracer.id, tracer.sys) 80 | 81 | sleep(0.5) 82 | 83 | from shared.tools.thread import getThreadState, getThreadInfo 84 | scenario_thread.state 85 | getThreadState(scenario_thread).frame 86 | 87 | 88 | #h = lambda o, scenario_thread=scenario_thread: scenario_thread.holdsLock(o) -------------------------------------------------------------------------------- /shared/tools/debug/frame.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is primarily a mixin to make keeping track of some utility functions easier 3 | """ 4 | 5 | 6 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 7 | __license__ = 'Apache 2.0' 8 | 
__maintainer__ = 'Andrew Geiger' 9 | __email__ = 'andrew.geiger@corsosystems.com' 10 | 11 | 12 | def strip_angle_brackets(internal_name): 13 | if internal_name.startswith('<') and internal_name.endswith('>'): 14 | return internal_name[1:-1] 15 | else: 16 | return internal_name 17 | 18 | 19 | def normalize_filename(filename): 20 | return strip_angle_brackets(filename) 21 | 22 | 23 | def iter_frames(frame): 24 | while frame: 25 | yield frame 26 | frame = frame.f_back 27 | 28 | 29 | def iter_frames_root(frame): 30 | stack = list(iter_frames(frame)) 31 | for frame in reversed(stack): 32 | yield frame 33 | 34 | 35 | def find_object(obj_name, frame): 36 | """Grab an item from the Python stack by its name, starting with the given frame.""" 37 | # if no shortcut is provided, start at the furthest point 38 | for frame in iter_frames(frame): 39 | if obj_name in frame.f_locals: 40 | return frame.f_locals[obj_name] 41 | return None 42 | 43 | 44 | def find_root_object(obj_name, frame): 45 | """Grab an item from the Python stack by its name, starting with the given frame.""" 46 | # if no shortcut is provided, start at the furthest point 47 | for frame in iter_frames_root(frame): 48 | if obj_name in frame.f_locals: 49 | return frame.f_locals[obj_name] 50 | return None 51 | 52 | -------------------------------------------------------------------------------- /shared/tools/debug/hijack.py: -------------------------------------------------------------------------------- 1 | """ 2 | The SysHijack is part of the secret sauce enabling the tracer 3 | to work at all. 4 | 5 | Long story short, Jython (rightly) makes it very hard to control 6 | a thread from the outside. By wrapping the Python system state, 7 | we can reliably gain access to the thread's state, meaning we 8 | can affect the thread from its own context. Or, put differently, 9 | it allows us to manipulate execution from an outside perspective. 
10 | 11 | Without this, we are guaranteed to couple our thread with the inspecting 12 | thread, and then the inspecting thread utterly jams up ours 13 | while it waits for input. From us. Just bananas. 14 | 15 | Note that the master Py object takes advantage of the Java thread 16 | state to keep the system states organized. This is a really 17 | really good idea that is awesome. Except that also means anything 18 | we do to the sys object happens _to us_, _NOT_ the target thread. 19 | And it's a slippery one, so for safety we reaquire it on the spot. 20 | 21 | We're not going for speed here, but rather analytical power and, 22 | if possible, reliability. 23 | """ 24 | 25 | from shared.tools.thread import getThreadState, Thread 26 | from shared.tools.debug.proxy import ProxyIO 27 | 28 | from org.python.core import Py 29 | 30 | 31 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 32 | __license__ = 'Apache 2.0' 33 | __maintainer__ = 'Andrew Geiger' 34 | __email__ = 'andrew.geiger@corsosystems.com' 35 | 36 | 37 | class DefSysHijack(object): 38 | """The main SysHijack class. 39 | By adding a subclass, attribute resolution works reliably in the __getattr__ and __setattr__ overrides. 40 | """ 41 | 42 | __slots__ = ( 43 | '_target_thread', 44 | '_io_proxy', 45 | '__weakref__', 46 | ) 47 | 48 | def __init__(self, thread): 49 | self._target_thread = thread 50 | self._io_proxy = ProxyIO(hijacked_sys=self) 51 | self._install() 52 | 53 | 54 | def _install(self): 55 | """Redirect all I/O to proxy's endpoints""" 56 | self._io_proxy.install() 57 | 58 | def _restore(self): 59 | """Restore all I/O to original's endpoints""" 60 | self._io_proxy.uninstall() 61 | 62 | 63 | @property 64 | def _thread_state(self): 65 | """If we're in the same thread, we need to grab the state from the master Py object. 66 | Otherwise we rip it from the thread itself. 67 | We'll also want this to be calculated every call to ensure it's the correct reference. 
68 | """ 69 | if Thread.currentThread() is self._target_thread: 70 | return Py.getThreadState() 71 | else: 72 | return getThreadState(self._target_thread) 73 | 74 | @property 75 | def _thread_sys(self): 76 | return self._thread_state.systemState 77 | 78 | 79 | # I/O proxy redirection 80 | # NOTE: This will not play well with other things attempting to hijack I/O 81 | # I think this is fair - only one bully per playground 82 | 83 | @property 84 | def stdin(self): 85 | if self._io_proxy.installed: 86 | return self._io_proxy.stdin 87 | else: 88 | return self._thread_sys.stdin 89 | @property 90 | def stdout(self): 91 | if self._io_proxy.installed: 92 | return self._io_proxy.stdout 93 | else: 94 | return self._thread_sys.stdout 95 | @property 96 | def stderr(self): 97 | if self._io_proxy.installed: 98 | return self._io_proxy.stderr 99 | else: 100 | return self._thread_sys.stderr 101 | @property 102 | def displayhook(self): 103 | if self._io_proxy.installed: 104 | return self._io_proxy.displayhook 105 | else: 106 | return self._thread_sys.displayhook 107 | 108 | 109 | def _getframe(self, depth=0): 110 | #print >>self.stdout, '[~] getting frame %d' % depth 111 | frame = self._thread_state.frame 112 | while depth > 0 and frame: 113 | depth -= 1 114 | frame = frame.f_back 115 | return frame 116 | 117 | 118 | def settrace(self, tracefunc=None): 119 | self._thread_sys.settrace(tracefunc) 120 | 121 | 122 | def setprofile(self, profilefunc=None): 123 | self._thread_sys.setprofile(None) 124 | 125 | 126 | # Context management 127 | 128 | def __enter__(self): 129 | self._install() 130 | return self 131 | 132 | def __exit__(self, exc_type, exc_val, exc_tb): 133 | self._restore() 134 | 135 | def __del__(self): 136 | self._restore() 137 | 138 | 139 | class SysHijack(DefSysHijack): 140 | """Capture a thread's system state and redirect it's standard I/O.""" 141 | 142 | # Override masking mechanic (the hijack) 143 | 144 | def __getattr__(self, attribute): 145 | """Get from this class 
first, otherwise use the wrapped item.""" 146 | try: 147 | return super(SysHijack, self).__getattr__(attribute) 148 | except AttributeError: 149 | return getattr(self._thread_sys, attribute) 150 | 151 | 152 | def __setattr__(self, attribute, value): 153 | """Set to this class first, otherwise use the wrapped item.""" 154 | try: 155 | super(SysHijack, self).__setattr__(attribute, value) 156 | except AttributeError: 157 | setattr(self._thread_sys, attribute, value) 158 | -------------------------------------------------------------------------------- /shared/tools/debug/proxy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Proxy lets us inspect the standard in/out/err for a thread. 3 | 4 | This is helpful when a thread is started in a context where it is difficult 5 | to observe the I/O. For example, an async thread spun up on the gateway 6 | will write directly to the wrapper log via print, which is extremely 7 | inconvenient. 8 | 9 | The streams are also buffered, allowing us to review the I/O after the fact. 10 | """ 11 | 12 | from StringIO import StringIO 13 | from collections import deque 14 | from time import sleep 15 | from datetime import datetime 16 | 17 | try: 18 | from shared.tools.compat import next 19 | except ImportError: 20 | pass 21 | 22 | 23 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 24 | __license__ = 'Apache 2.0' 25 | __maintainer__ = 'Andrew Geiger' 26 | __email__ = 'andrew.geiger@corsosystems.com' 27 | 28 | 29 | class StreamBuffer(object): 30 | __slots__ = ('history', 31 | '_target_io', 32 | '_parent_proxy', 33 | '_buffer_line', 34 | '__weakref__', 35 | ) 36 | _MAX_HISTORY = 10000 37 | _BUFFER_CHUNK = 1000 38 | 39 | def __init__(self, target_io, parent_proxy=None): 40 | self._buffer_line = '' 41 | self.history = ['[%s] %s' % (str(datetime.now()), '#! 
Starting log...')] 42 | 43 | # Failsafe to drill past repeated inits 44 | while isinstance(target_io, StreamBuffer): 45 | target_io = target_io._target_io 46 | 47 | self._target_io = target_io 48 | self._parent_proxy = parent_proxy 49 | 50 | system.util.getLogger('StreamBuffer').debug(repr(self._target_io)) 51 | 52 | 53 | @property 54 | def parent(self): 55 | return self._parent_proxy 56 | 57 | 58 | def write(self, string): 59 | self._target_io.write(string) 60 | 61 | buffer = self._buffer_line + string 62 | timestamp = str(datetime.now()) 63 | ix = 0 64 | while '\n' in buffer: 65 | line, _, buffer = buffer.partition('\n') 66 | self.history.append('[%s] %s' % (timestamp, line)) 67 | self._buffer_line = buffer 68 | 69 | 70 | def writelines(self, iterable): 71 | self._target_io.writelines(iterable) 72 | 73 | timestamp = str(datetime.now()) 74 | for ix, line in enumerate(iterable): 75 | if ix == 0: 76 | line = self._buffer_line + line 77 | self._buffer_line = '' 78 | self.history.append('[%s %d] %s' % (timestamp, ix, line)) 79 | 80 | 81 | def __getattr__(self, attribute): 82 | """Get from this class first, otherwise use the wrapped item.""" 83 | try: 84 | return super(StreamBuffer, self).__getattr__(attribute) 85 | except AttributeError: 86 | return getattr(self._target_io, attribute) 87 | 88 | def __setattr__(self, attribute, value): 89 | """Set to this class first, otherwise use the wrapped item.""" 90 | try: 91 | return super(StreamBuffer, self).__setattr__(attribute, value) 92 | except AttributeError: 93 | return setattr(self._target_io, attribute, value) 94 | 95 | 96 | class ProxyIO(object): 97 | """Control the I/O""" 98 | 99 | __slots__ = ('_stdin', '_stdout', '_stderr', '_displayhook', 100 | '_original_displayhook', 101 | '_hijacked_sys', '_installed') 102 | 103 | def __init__(self, hijacked_sys=None): 104 | self._installed = False 105 | 106 | self._original_displayhook = None 107 | self._stdin = None 108 | self._stdout = None 109 | self._stderr = None 110 | 
self._displayhook = None 111 | 112 | self._hijacked_sys = hijacked_sys 113 | 114 | 115 | @property 116 | def installed(self): 117 | return self._installed 118 | 119 | @property 120 | def coupled_sys(self): 121 | return self._coupled_sys 122 | 123 | @property 124 | def last_input(self): 125 | return self.stdin.history[-1] 126 | 127 | @property 128 | def last_output(self): 129 | return self.stdout.history[-1] 130 | 131 | @property 132 | def last_error(self): 133 | return self.stderr.history[-1] 134 | 135 | 136 | @property 137 | def stdin(self): 138 | return self._stdin 139 | 140 | @property 141 | def stdout(self): 142 | return self._stdout 143 | 144 | @property 145 | def stderr(self): 146 | return self._stderr 147 | 148 | @property 149 | def displayhook(self): 150 | return self._displayhook 151 | 152 | @property 153 | def _coupled_sys(self): 154 | return self._hijacked_sys._thread_sys 155 | 156 | 157 | def install(self): 158 | self._original_displayhook = self._coupled_sys.displayhook 159 | self._displayhook = self._original_displayhook # shared.tools.pretty.displayhook 160 | 161 | self._stdin = StreamBuffer(self._coupled_sys.stdin, parent_proxy=self) 162 | self._stdout = StreamBuffer(self._coupled_sys.stdout, parent_proxy=self) 163 | self._stderr = StreamBuffer(self._coupled_sys.stderr, parent_proxy=self) 164 | 165 | self._coupled_sys.stdin = self.stdin 166 | self._coupled_sys.stdout = self.stdout 167 | self._coupled_sys.stderr = self.stderr 168 | self._coupled_sys.displayhook = self.displayhook 169 | 170 | self._installed = True 171 | 172 | 173 | def uninstall(self): 174 | if not self._installed: 175 | return 176 | 177 | self._coupled_sys.stdin = self._stdin._target_io 178 | self._coupled_sys.stdout = self._stdout._target_io 179 | self._coupled_sys.stderr = self._stderr._target_io 180 | self._coupled_sys.displayhook = self._original_displayhook 181 | 182 | self._installed = False 183 | 184 | 185 | # Context management 186 | 187 | def __enter__(self): 188 | 
self.install() 189 | return self 190 | 191 | def __exit__(self, exc_type, exc_val, exc_tb): 192 | self.uninstall() 193 | 194 | def __del__(self): 195 | """NOTE: This is NOT guaranteed to run, but it's a mild safeguard.""" 196 | self.uninstall() 197 | -------------------------------------------------------------------------------- /shared/tools/debug/snapshot.py: -------------------------------------------------------------------------------- 1 | """ 2 | Snapshots keep track of the tracer as it executes. 3 | 4 | Use the snapshot's context_buffer to look back on the history of the trace. 5 | Note that it may not be a perfect image! Execution frames update while the 6 | stack frame executes, and any objects that fails the deepcopy (or if it's 7 | not actively deepcopying for speed) may mutate as execution progresses. 8 | Moreover, Java objects are not subject to deepcopy, meaning that their 9 | references are merely passed along and saved. And so be forewarned. 10 | """ 11 | 12 | from copy import deepcopy 13 | 14 | from shared.tools.debug.frame import iter_frames 15 | 16 | 17 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 18 | __license__ = 'Apache 2.0' 19 | __maintainer__ = 'Andrew Geiger' 20 | __email__ = 'andrew.geiger@corsosystems.com' 21 | 22 | 23 | class Snapshot(object): 24 | 25 | __slots__ = ('_event', '_arg', '_frame', '_code', 26 | '_filename', '_line', '_caller', '_depth', 27 | '_locals_key', '_locals_dup', '_locals_ref', '_locals_err', 28 | '_cloned', 29 | '__weakref__',) 30 | 31 | _repr_markers = {'line': '|', 'call': '+', 'return': '/', 'exception': 'X', 32 | 'c_call': '+', 'c_return': '/', 'c_exception': 'X', 33 | 'init': '#'} 34 | 35 | def __init__(self, frame, event, arg, clone=True): 36 | 37 | 38 | self._event = event 39 | self._arg = arg 40 | self._frame = frame 41 | 42 | self._code = frame.f_code 43 | self._filename = frame.f_code.co_filename 44 | self._line = frame.f_lineno 45 | self._caller = frame.f_code.co_name 46 | self._depth = 
len(list(iter_frames(frame))) 47 | 48 | local_key = set() 49 | local_dup = {} 50 | local_ref = {} 51 | local_err = {} 52 | 53 | if clone: 54 | # attempt to make a deepcopy of each item, 55 | # note that Java and complex objects will fail deepcopy 56 | # and instead will be saved by reference only 57 | for key,value in frame.f_locals.items(): 58 | try: 59 | local_dup[key] = deepcopy(value) 60 | except Exception, err: 61 | local_ref[key] = value 62 | local_err[key] = err 63 | self._cloned = clone 64 | 65 | self._locals_key = local_key 66 | self._locals_dup = local_dup 67 | self._locals_ref = local_ref 68 | self._locals_err = local_err 69 | 70 | 71 | @property 72 | def event(self): 73 | return self._event 74 | @property 75 | def arg(self): 76 | return self._arg 77 | @property 78 | def frame(self): 79 | return self._frame 80 | @property 81 | def filename(self): 82 | return self._filename 83 | @property 84 | def line(self): 85 | return self._line 86 | @property 87 | def caller(self): 88 | return self._caller 89 | @property 90 | def depth(self): 91 | return self._depth 92 | @property 93 | def code(self): 94 | return self._code 95 | 96 | @property 97 | def cloned(self): 98 | return self._cloned 99 | @property 100 | def locals(self): 101 | return dict(self._locals_ref.items() + self._locals_dup.items()) 102 | @property 103 | def locals_uncloned(self): 104 | return self._locals_err.keys() 105 | 106 | @property 107 | def globals(self): 108 | raise NotImplementedError("Frame globals are not snapshot during execution.") 109 | @property 110 | def globals_uncloned(self): 111 | raise NotImplementedError("Frame globals are not snapshot during execution.") 112 | 113 | def back_context(self, arg=None, clone=False): 114 | if self._frame.f_back: 115 | return Snapshot(self._frame.f_back, 'backtrace', arg, clone) 116 | else: 117 | return None 118 | 119 | def __getitem__(self, key): 120 | """Get var from frame. Note that this has various guarantees depending on setup. 
121 | 122 | If the frame locals were cloned, then it will first try to return the deepcopy 123 | version (to avoid mutation as frame evolves), then it'll fall back to a reference. 124 | If references were not cloned, the frame is directly referenced. Note that f_locals 125 | will mutate as the frame executes, so this is the least reliable way to see 126 | what is currently happening. 127 | """ 128 | if self._cloned: 129 | val = self._locals_dup.get(key) 130 | if val is None: 131 | return self._locals_ref.get(key) 132 | else: 133 | return val 134 | else: 135 | return self._frame.f_locals[key] 136 | 137 | 138 | def as_dict(self): 139 | props = 'event arg frame filename line caller local'.split() 140 | return dict((prop,getattr(self,prop)) for prop in props) 141 | 142 | 143 | def __repr__(self): 144 | tree_marker = [' ']*4 145 | tree_marker[len(tree_marker) % (self.depth)] = self._repr_markers.get(self.event, '*') 146 | return '' % (''.join(tree_marker), 147 | self.depth, self.event.capitalize()[:6], self.line, self.filename, self.caller) 148 | -------------------------------------------------------------------------------- /shared/tools/debug/trap.py: -------------------------------------------------------------------------------- 1 | """ 2 | Create traps for the tracer to monitor and trip into interdiction mode 3 | """ 4 | 5 | from shared.tools.expression import Expression, two_argument_operators 6 | 7 | 8 | from functools import wraps 9 | 10 | 11 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 12 | __license__ = 'Apache 2.0' 13 | __maintainer__ = 'Andrew Geiger' 14 | __email__ = 'andrew.geiger@corsosystems.com' 15 | 16 | 17 | def fail_false(function): 18 | """Decorates functions that on an exception simply return False""" 19 | @wraps(function) 20 | def false_on_error(*args, **kwargs): 21 | try: 22 | return function(*args, **kwargs) 23 | except: 24 | return False 25 | return false_on_error 26 | 27 | 28 | def resolve_field(context, field): 29 | return 
class BaseTrap(object):
	"""Abstract base for tracer traps.

	Subclasses implement check(context), returning True when the trap trips.
	"""
	__slots__ = ('__weakref__',)

	def __init__(self):
		raise NotImplementedError("Subclass for specific methods of trapping.")

	def check(self, context):
		raise NotImplementedError("Subclass for specific methods of trapping.")

	@staticmethod
	def resolve_field(context, field):
		"""Resolve `field` from `context`, preferring attribute access
		and falling back to item lookup.

		FIX: subclasses call self.resolve_field(...), but no such method
		existed anywhere on the hierarchy (only a module-level helper), so
		every check() raised AttributeError -- silently masked as False by
		@fail_false. Unlike the module-level helper, this does not eagerly
		evaluate context[field] as getattr's default argument, which raised
		whenever the context did not support that key.
		"""
		try:
			return getattr(context, field)
		except AttributeError:
			return context[field]
class ContextTrap(BaseTrap):
	"""Return true if a context matches the preset values."""
	__slots__ = ('context_values',)

	def __init__(self, **context_values):
		self.context_values = context_values

	@fail_false
	def check(self, context):
		"""True iff every preset field resolves from context to its expected value."""
		# FIX: was self.resolve_field(...) -- no such method exists on the
		# class; resolve_field is the module-level helper, so every check
		# raised AttributeError (masked as False by @fail_false)
		return all(resolve_field(context, field) == value
		           for field, value
		           in self.context_values.items())
class Return(TransientTrap):
	"""Trips when the function returns.

	Captures the scope (depth, filename, caller) at creation and fires only
	when that same scope emits a 'return' trace event.
	"""
	# FIX: sibling traps Next and Until declare __slots__; Return did not,
	# giving its instances an unnecessary __dict__
	__slots__ = ('depth', 'filename', 'caller')

	def __init__(self, context):
		self.depth = context.depth
		self.filename = context.filename
		self.caller = context.caller

	def check(self, context):
		# all three scope markers must match, and the frame must be returning
		return all((
			context.depth == self.depth,
			context.filename == self.filename,
			context.caller == self.caller,
			context.event == 'return',
		))
12 | 13 | >>> dpac = DPAC(**some_dict) 14 | """ 15 | __slots__ = tuple() 16 | 17 | # True == exclusive to __slots__ 18 | # False == no error if extra args beyond what's in __slots__, just skipped 19 | _skip_undefined=False 20 | 21 | _coerce_fields = {} 22 | 23 | @classmethod 24 | def _nop(cls, x): 25 | return x 26 | def _coerce(self, key, value): 27 | return self._coerce_fields.get(key, self._nop)(value) 28 | 29 | def __init__(self, **kwargs): 30 | if self._skip_undefined: 31 | for key,value in kwargs.items(): 32 | try: 33 | self.__setitem__(key, value) 34 | except AttributeError: 35 | pass 36 | else: 37 | for key,value in kwargs.items(): 38 | self.__setitem__(key, value) 39 | 40 | def keys(self): 41 | ks = [] 42 | for key in self.__slots__: 43 | try: 44 | _ = getattr(self, key) 45 | ks.append(key) 46 | except AttributeError: 47 | pass 48 | return ks 49 | 50 | def values(self): 51 | vs = [] 52 | for key in self.__slots__: 53 | try: 54 | vs.append(getattr(self, key)) 55 | except AttributeError: 56 | pass 57 | return vs 58 | 59 | def __contains__(self, key): 60 | try: 61 | _ = getattr(self, key) 62 | return True 63 | except AttributeError: 64 | return False 65 | 66 | def __setitem__(self, key, val): 67 | setattr(self, key, self._coerce(key,val)) 68 | 69 | def __getitem__(self, key): 70 | if not key in self.__slots__: 71 | raise AttributeError('"%s" is not a key in __slots__' % key) 72 | try: 73 | return getattr(self, key) 74 | except AttributeError: 75 | return None 76 | 77 | def _asdict(self): 78 | d = {} 79 | for key in self.__slots__: 80 | try: 81 | v = getattr(self, key) 82 | d[key] = v._asdict() if isinstance(v, DictPosingAsClass) else v 83 | except AttributeError: 84 | pass 85 | return d 86 | 87 | def __repr__(self): 88 | return repr(self._asdict()) 89 | 90 | 91 | class DPAC_JSON(DictPosingAsClass): 92 | """An example of extending it for easier serializing""" 93 | @classmethod 94 | def _coerceToString(cls, thing): 95 | if isinstance(thing, DictPosingAsClass): 
class Antikythera(ModuleType):
	"""
	This is a metaclass that sets up the mechanisms that allow
	the class to behave like a module. It is a module, but it
	will act like one here as well.

	Importantly, it will also inject itself into sys.modules
	correctly as well. This is important because the imp
	and __import__ mechanics will not do this correctly.
	"""

	def __new__(cls, clsname, bases, attrs):
		# NOTE(review): returns the class itself rather than an instance --
		# this type is treated as a module-like singleton, never instantiated.
		return cls

	def __getattr__(cls, attribute):
		# explicit overrides win, then nested submodules, then normal lookup
		if attribute in cls.overrides:
			return getattr(cls, attribute)
		elif attribute in cls.submodules:
			return cls.submodules[attribute]
		else:
			# FIX: was super(MetaModule, cls) -- MetaModule is not defined
			# anywhere in this module (copied from AutoModule's metaclass
			# declaration); reference this class instead
			return super(Antikythera, cls).__getattr__(attribute)
def serializeToXML(obj, anchor=None):
	"""Serialize `obj` to XML via the designer context's serializer.

	anchor: passed through to resolve the designer context (may be None).
	"""
	xml_serializer = getSerializer(anchor)
	xml_serializer.addObject(obj)
	return xml_serializer.serializeXML()
'<%s.%s %s>' % (self._parent.__name__, 35 | self.__class__.__name__, 36 | self._type.__str__(self)) 37 | 38 | 39 | 40 | class MetaEnum(type): 41 | 42 | _initFields = ('_fields', '_values') 43 | _class_initialized = False 44 | 45 | def __init__(cls, clsname, bases, attributes): 46 | 47 | super(MetaEnum, cls).__setattr__('_class_initialized', False) # bypass interlock 48 | 49 | fvs = [(key,value) for key, value in attributes.items() if not key.startswith('_')] 50 | 51 | if fvs: 52 | fields,values = zip(*sorted(fvs, key=lambda (key,value): value)) 53 | 54 | setattr(cls, '_fields', fields) 55 | setattr(cls, '_values', values) 56 | 57 | for key,value in fvs: 58 | EnumAttribute = MetaEnumValue(key, (EnumValue,type(value)), {'_parent': cls}) 59 | setattr(cls, key, EnumAttribute(value)) 60 | 61 | 62 | else: 63 | setattr(cls, '_fields', tuple()) 64 | setattr(cls, '_values', tuple()) 65 | 66 | cls._class_initialized = True 67 | 68 | 69 | def __setattr__(cls, key, value): 70 | if cls._class_initialized: 71 | raise AttributeError("<%s> attributes are readonly" % cls.__name__) 72 | else: 73 | super(MetaEnum, cls).__setattr__(key, value) 74 | 75 | def __contains__(cls, enum_key): 76 | return enum_key in cls._fields 77 | 78 | def keys(cls): 79 | return cls._fields 80 | 81 | def values(cls): 82 | return cls._values 83 | 84 | def __iter__(cls): 85 | return iter(getattr(cls, field) for field in cls._fields) 86 | 87 | def __getitem__(cls, attribute): 88 | return getattr(cls, attribute) 89 | 90 | def __str__(cls): 91 | return cls.__name__ 92 | 93 | def __repr__(cls): 94 | return "<%s {%s}>" % (cls.__name__, 95 | ', '.join("%s: %s" % (repr(field), repr(value)) 96 | for field, value in zip(cls._fields, cls._values))) 97 | 98 | 99 | class Enum(object): 100 | __metaclass__ = MetaEnum 101 | __slots__ = tuple() 102 | 103 | _fields = tuple() 104 | _values = tuple() 105 | 106 | def __new__(cls, value=None): 107 | if value is not None and value in cls._values: 108 | return getattr(cls, 
cls._fields[[i for i,v in enumerate(cls._values) if v == value][0]]) 109 | raise NotImplementedError("%s is an enumeration and does not support instantiation." % cls.__name__) 110 | 111 | def __init__(cls): 112 | raise NotImplementedError("%s is an enumeration and does not support instantiation." % cls.__name__) 113 | 114 | def __setattr__(cls, key, value): 115 | raise AttributeError("<%s> attributes are readonly" % cls.__name__) 116 | -------------------------------------------------------------------------------- /shared/tools/examples.py: -------------------------------------------------------------------------------- 1 | """ 2 | Examples for use with documentation or testing. 3 | """ 4 | 5 | 6 | from org.apache.commons.lang3.time import DateUtils 7 | from java.util import Date 8 | import random, string 9 | 10 | 11 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 12 | __license__ = 'Apache 2.0' 13 | __maintainer__ = 'Andrew Geiger' 14 | __email__ = 'andrew.geiger@corsosystems.com' 15 | 16 | 17 | fake_words = lambda x: ''.join(string.ascii_letters[random.randint(0,26*2-1)] for i in range(x)) 18 | 19 | simpleListList = [range(i,i+3) for i in range(1,9,3)] 20 | 21 | simpleDataset = system.dataset.toDataSet(list('abc'),simpleListList) 22 | 23 | complexDataset = system.dataset.toDataSet( 24 | ['string', 'int', 'double', 'date'], 25 | [ [ 'asdf' , 1 , 10.1 , DateUtils.addMinutes(Date() , 33) ] , 26 | [ 'qwer' , 3 , 12.3 , DateUtils.addHours(Date() , -102) ] , 27 | [ '1q2w3e4r' , -34 , 1000.000000000000000001 , DateUtils.setYears(Date() , 1999) ] , 28 | [ '' , None , -0.000 , Date() ] , 29 | ] ) 30 | 31 | simpleDictList = {'a': [1, 4, 7], 32 | 'b': [2, 5, 8], 33 | 'c': [3, 6, 9], 34 | } 35 | 36 | complexDictList = { 'date': [ DateUtils.addMinutes(Date(), 33), 37 | DateUtils.addHours(Date(), -102), 38 | DateUtils.setYears(Date(), 1999), 39 | Date(), 40 | ], 41 | 'string': [ 'asdf', 'qwer', '1q2w3e4r', '',], 42 | 'double': [10.1, 12.3, 1000.0, -0.0,], 43 | 'int': 
def generateFakeStringDataset(headings='a b c d'.split(), word_length=5, num_rows=100):
	"""Build a dataset of random letter strings for demos and tests.

	headings:    column names (NOTE: mutable default list -- only read here,
	             so it is safe, but callers should not mutate it)
	word_length: length of each random string cell
	num_rows:    number of rows to generate

	NOTE(review): relies on the Ignition-scoped `system` global and the
	module-level `fake_words` helper -- only usable inside Ignition.
	"""
	return system.dataset.toDataSet(
		headings,
		[[fake_words(word_length) for x in range(len(headings))]
		 for i in range(num_rows)])
def new_module(module_path):
	"""Create a bare module object primed for insertion into `sys.modules`."""
	module = imp.new_module(module_path)
	module.__file__ = module_path
	module.__name__ = module_path
	# everything before the final dot is the owning package (empty if top-level)
	package, _, _ = module_path.rpartition('.')
	module.__package__ = package
	return module
in target_module_path: 47 | return 48 | 49 | module_path_parts = target_module_path.split('.') 50 | supporting_packages = ['.'.join(module_path_parts[0:i]) 51 | for i 52 | in range(1,len(module_path_parts))] 53 | # prime the loop 54 | package_chain = supporting_packages[:] 55 | package_chain.append(target_module_path) 56 | 57 | # chain the child connections 58 | parent_module = setdefault_module(sys_context, package_chain[0]) 59 | for module_path in package_chain[1:]: 60 | child_module = setdefault_module(sys_context, module_path) 61 | child_module_name = module_path.rpartition('.')[2] 62 | setattr(parent_module, child_module_name, child_module) 63 | parent_module = child_module 64 | 65 | 66 | 67 | def hotload(module_zip_binary, global_context=None, namespace='', sys_context=None, force_replace=False): 68 | """ 69 | Load a zip file's python code as a module at runtime. 70 | 71 | Note that the folder path is assumed to be the module paths. 72 | This also does NOT use any optimizations. Use this strictly 73 | for runtime monkey patching/loading. 74 | 75 | This simply executes the Python files and stores the namespace 76 | as a new module in `sys.modules`. 
77 | """ 78 | if global_context is None: 79 | global_context = {} 80 | else: 81 | # don't mutate the original 82 | global_context = global_context.copy() 83 | 84 | if sys_context is None: 85 | import sys as sys_context 86 | 87 | module_binary_io = BytesIO(module_zip_binary) 88 | module_files = ZipFile(module_binary_io, 'r') 89 | 90 | module_code = {} 91 | 92 | for z in module_files.infolist(): 93 | module_path_parts = z.filename.split('/') 94 | if namespace: 95 | module_path_parts = namespace.split('.') + module_path_parts 96 | module_path = '.'.join(module_path_parts) 97 | 98 | # only compile python code, then trim the extension to get the path 99 | if not module_path.endswith('.py'): 100 | continue 101 | module_path = module_path[:-3] 102 | 103 | # have Python build the module 104 | raw_code = module_files.read(z.filename) 105 | local_context = global_context.copy() 106 | code = compile(raw_code, '' % module_path, 'exec') 107 | module_code[module_path] = code 108 | 109 | remaining = set(module_code) 110 | 111 | if force_replace: 112 | for module_path in remaining: 113 | if module_path in sys_context.modules: 114 | del sys_context.modules[module_path] 115 | 116 | while remaining: 117 | 118 | for module_path in remaining: 119 | try: 120 | local_context = { 121 | '__name__': module_path, 122 | '__file__': module_path, 123 | } 124 | code = module_code[module_path] 125 | # pass local_context as both global and local dict 126 | # that way as it executes, nested functions can access the global scope 127 | result = eval(code, local_context) 128 | 129 | # Should we try to load them out of order, just move on 130 | except ImportError, err: 131 | #print '== Compile error: %s --> %r' % (module_path, err) 132 | continue 133 | 134 | #print 'Module compiled: %s' % module_path 135 | 136 | # create and construct module 137 | module = new_module(module_path) 138 | for name, obj in local_context.items(): 139 | setattr(module, name, obj) 140 | 141 | # apply to sys and cache for 
cross-linking 142 | sys_context.modules[module_path] = module 143 | 144 | ensure_import_chain(module_path, sys_context) 145 | 146 | remaining.remove(module_path) 147 | break 148 | else: 149 | raise ImportError("All modules could not be loaded!\nLast error in %s: %r" % (module_path, err)) 150 | 151 | # clean up to ensure packages are handled 152 | packages = [mp for mp in module_code if '%s.__init__' % mp in module_code] 153 | for module_path in module_code: 154 | if '%s.__init__' % mp in module_code: 155 | sys.modules[module_path].__path__ = module_path 156 | else: 157 | sys.modules[module_path].__package__ = module_path.rpartition('.')[0] 158 | 159 | 160 | 161 | 162 | 163 | from java.net import URLClassLoader, URL 164 | from java.io import File 165 | from jarray import array 166 | from java.lang import Class as JavaClass 167 | from java.lang import Exception as JavaException, NoClassDefFoundError 168 | from java.util.jar import JarFile 169 | 170 | 171 | 172 | class JarClassLoader(object): 173 | """Load in jars via an alternate method from just injecting into sys.path 174 | 175 | Jython can natively load jar libraries very easily, but occasionally has weird 176 | trouble loading inner classes (or something). At the least, grinding through and 177 | manually injecting the class objects seems to help. 178 | 179 | An attempt was made to make the class loading lazy, but the import mechanics 180 | do not seem to resolve properties on load, returning the property instead 181 | of the class itself. Oh well. 
	def inject_sys(self):
		"""Create a stub module per jar package and register it in sys.modules.

		Each package path becomes an imp-created module whose attributes are
		the eagerly-loaded Java classes from that package.
		"""
		for package_path in sorted(self.packages):
			module = imp.new_module(package_path)
			module.__file__ = os.path.join(self.jar_file_path, package_path)
			module.__name__ = package_path
			module.__package__ = '.'.join(package_path.split('.')[:1]) # I forget why this is needed...

			for class_name in self.packages[package_path]:
				# inner classes (Outer$Inner) collapse to their outer class
				class_name = class_name.partition('$')[0]
				setattr(module, class_name, self.get_class(package_path, class_name))

			sys.modules[package_path] = module
def jar_class_grind(jar_paths):
	"""Eagerly load every jar in `jar_paths`, injecting each jar's packages
	and classes into sys.modules.

	The JarClassLoader does all of its work in __init__ (via inject_sys),
	so the instances themselves are discarded.
	"""
	for jar_path in jar_paths:
		_ = JarClassLoader(jar_path)
28 | """ 29 | def __init__(self, obj, field_name_to_unlock, 30 | flags_to_disable=0, flags_to_enable=0): 31 | 32 | Logger().info('init...') 33 | 34 | # check if we're targetting the class or an instance of it 35 | if isinstance(obj, JavaClass): 36 | self.instance = None 37 | self.clazz = obj 38 | else: 39 | self.instance = obj 40 | self.clazz = obj.getClass() 41 | 42 | self.field_name = field_name_to_unlock 43 | self.field = None 44 | 45 | self.resolve_field() 46 | 47 | self.flags_to_disable = flags_to_disable 48 | self.flags_to_enable = flags_to_enable 49 | 50 | self.originally_accessible = self.field.isAccessible() 51 | self.original_modifiers = self.field.getModifiers() 52 | 53 | 54 | # context management 55 | def __enter__(self): 56 | self.unlock() 57 | # return self 58 | if self.instance: 59 | return self.field.get(self.instance) 60 | else: 61 | return self.field.get(self.clazz) 62 | 63 | def __exit__(self, ex_type, ex_value, ex_traceback): 64 | self.relock() 65 | 66 | 67 | def resolve_field(self): 68 | 69 | Logger().info('resolving field...') 70 | 71 | clazz = self.clazz 72 | field = None 73 | i = 10 74 | while i and clazz is not JavaObject: 75 | try: 76 | field = clazz.getDeclaredField(self.field_name) 77 | except NoSuchFieldException: 78 | Logger().info('... %(clazz)r') 79 | clazz = clazz.getSuperclass() 80 | 81 | i-=1 82 | Logger().info('...[%(i)d] ended on %(clazz)r...') 83 | if field is None: 84 | raise AttributeError('Field %r not found in the class inheritance structure of %r!' % (self.field_name, self.clazz,)) 85 | 86 | Logger().info('field: %(field)r') 87 | 88 | self.field = field 89 | 90 | def unlock(self): 91 | 92 | Logger().info('Unlocking...') 93 | 94 | # allow field modifications 95 | _FieldModifiers.setAccessible(True) 96 | 97 | # unlock for modification 98 | if not self.originally_accessible: 99 | self.field.setAccessible(True) 100 | 101 | # modify! 
102 | new_modifiers = self.original_modifiers 103 | new_modifiers &= ~self.flags_to_disable 104 | new_modifiers |= self.flags_to_enable 105 | _FieldModifiers.setInt(self.field, new_modifiers) 106 | 107 | Logger().info('Done...') 108 | 109 | 110 | def relock(self): 111 | try: 112 | # allow field modifications 113 | _FieldModifiers.setAccessible(True) 114 | 115 | # revert modifications 116 | _FieldModifiers.setInt(self.field, self.original_modifiers) 117 | 118 | # relock, if needed 119 | if self.field.isAccessible() != self.originally_accessible: 120 | self.field.setAccessible(self.originally_accessible) 121 | finally: 122 | # resume lockout 123 | _FieldModifiers.setAccessible(False) 124 | 125 | 126 | 127 | 128 | #The Modifier flags available (and their enum value as of writing this) 129 | # ABSTRACT 1024 130 | # FINAL 16 131 | # INTERFACE 512 132 | # NATIVE 256 133 | # PRIVATE 2 134 | # PROTECTED 4 135 | # PUBLIC 1 136 | # STATIC 8 137 | # STRICT 2048 138 | # SYNCHRONIZED 32 139 | # TRANSIENT 128 140 | # VOLATILE 64 141 | 142 | for name in dir(Modifier): 143 | if name == name.upper(): 144 | value = getattr(Modifier, name) 145 | if isinstance(value, int): 146 | setattr(LOTO, name, value) -------------------------------------------------------------------------------- /shared/tools/macro.py: -------------------------------------------------------------------------------- 1 | """ 2 | Mark functions as recordable / dry-run-able 3 | 4 | Putting the @Recordable decorator on a function allows you to 5 | use a function and record _how_ you used it, or also to interdict 6 | the function so it behaves as a stub. 7 | 8 | For example, if you have a function that performs work on disk, 9 | and you run the script and it seems to work - but you don't want to 10 | do it again - then you can simply use the context manager `LogMacro` 11 | and interdict that function. 
Run the script exactly as before, but now 12 | the function will _not_ be called, but you'll have all the arguments 13 | that go into it. 14 | 15 | There's an oddly limited number of situations where this is exceptionally 16 | helpful, but when you need a dry-run, this can help make that happen. 17 | """ 18 | from shared.tools.meta import PythonFunctionArguments 19 | 20 | import os 21 | import traceback 22 | from collections import defaultdict 23 | from functools import wraps 24 | from java.lang import Thread 25 | from weakref import WeakKeyDictionary 26 | 27 | 28 | 29 | class MetaRecordable(type): 30 | 31 | record = WeakKeyDictionary() 32 | 33 | # interdiction only works per thread 34 | _interdiction_threads = WeakKeyDictionary() 35 | 36 | 37 | def __call__(cls, function): 38 | 39 | pfa = PythonFunctionArguments(function) 40 | function_argument_names = pfa.args 41 | 42 | @wraps(function) 43 | def wrapped_function(*args, **kwargs): 44 | 45 | interdict = False 46 | 47 | if cls.record: 48 | 49 | all_args = dict(zip(function_argument_names, args)) 50 | all_args.update(kwargs) 51 | 52 | for entry in cls.record: 53 | try: 54 | cls.record[entry].append((function, all_args)) 55 | except AttributeError: 56 | pass # recording set up wrong for this entry: should be a list! 
57 | 58 | if cls._interdiction_threads.get(entry) is Thread.currentThread(): 59 | interdict = True 60 | 61 | if not interdict: 62 | results = function(*args, **kwargs) 63 | return results 64 | 65 | return None 66 | 67 | return wrapped_function 68 | 69 | 70 | @classmethod 71 | def __getitem__(metacls, key): 72 | return metacls.record[key] 73 | 74 | 75 | @classmethod 76 | def register(metacls, key, default=None, interdict=False): 77 | if default is not None: 78 | assert isinstance(default, list) 79 | metacls.record[key] = default 80 | else: 81 | metacls.record[key] = [] 82 | 83 | if interdict: 84 | metacls._interdiction_threads[key] = Thread.currentThread() 85 | 86 | @classmethod 87 | def unregister(metacls, key): 88 | for weakdict in (metacls.record, metacls._interdiction_threads): 89 | try: 90 | del weakdict[key] 91 | except KeyError: 92 | pass # mission already accomplished 93 | 94 | 95 | 96 | class Recordable(object): 97 | __metaclass__ = MetaRecordable 98 | 99 | 100 | 101 | class LogMacro(object): 102 | 103 | def __init__(self, interdict=True): 104 | self.interdict = interdict 105 | self.log = [] 106 | 107 | def __enter__(self): 108 | Recordable.register(self, default=self.log, interdict=self.interdict) 109 | return self 110 | 111 | def __exit__(self, ex_type, ex_value, ex_traceback): 112 | Recordable.unregister(self) 113 | 114 | 115 | #from shared.tools.pretty import p,pdir,install; install() 116 | #from shared.tools.macro import Recordable, LogMacro 117 | # 118 | # 119 | #@Recordable 120 | #def foo(a, b, c=3, **kwargs): 121 | # print a,b,c, kwargs 122 | # 123 | #print 'by itself' 124 | #foo(1,2) 125 | # 126 | #print 'recording macro and interdicting' 127 | #with LogMacro() as lm: 128 | # 129 | # foo(1,2) 130 | # 131 | # foo(b=2, a=1, c=5, d=44) 132 | # 133 | # p(lm.log) 134 | # 135 | #print 'And show that the record is cleared after the context manager closes' 136 | #Recordable.record 
-------------------------------------------------------------------------------- /shared/tools/memoize.py: -------------------------------------------------------------------------------- 1 | """ 2 | Memoize functions 3 | 4 | memoize - remember results for any particular (hashable) inputs 5 | memoize_for_call_stack - same, but only within the same function call 6 | (this is a uniquely Jython appropriate thing, 7 | since one thread may execute many Python contexts) 8 | 9 | """ 10 | 11 | import sys 12 | from datetime import datetime, timedelta 13 | from functools import wraps 14 | 15 | 16 | def memoize(function): 17 | """Memoize outputs. 18 | 19 | Note that this does _not_ expire the cache, so switch to the 20 | shared.tools.cache decorator if occasional culling is needed. 21 | """ 22 | @wraps(function) 23 | def wrapped_function(*args, **kwargs): 24 | # convert kwargs to args (with defaults), if any, since we can't key on a dict 25 | if kwargs: 26 | key_args = args + tuple( # default or kwargs 27 | kwargs.get(arg_name, default) 28 | for default, arg_name 29 | in zip( 30 | function.func_defaults, 31 | function.func_code.co_varnames[len(args):function.func_code.co_argcount], 32 | ) 33 | ) 34 | else: 35 | key_args = args 36 | 37 | try: # to generate the hash of the args 38 | memo_key = hash(key_args) 39 | # failsafe and simply pass to function 40 | except TypeError: 41 | return function(*args, **kwargs) 42 | 43 | # if not memo_key in cache, calculate and add it 44 | if not memo_key in wrapped_function.memo_cache: 45 | value = function(*args, **kwargs) 46 | wrapped_function.memo_cache[memo_key] = value 47 | 48 | return wrapped_function.memo_cache[memo_key] 49 | 50 | wrapped_function.memo_cache = {} 51 | 52 | return wrapped_function 53 | 54 | 55 | 56 | def root_stack_frame(max_depth = 100): 57 | for size in range(2, max_depth): 58 | try: 59 | frame = sys._getframe(size) 60 | size += 1 61 | except ValueError: 62 | return frame 63 | else: 64 | raise RuntimeError('Failed to 
find root stack') 65 | 66 | 67 | #CALLSTACK_CACHE_EXPIRATION = timedelta(minutes=10) 68 | CALLSTACK_CACHE_EXPIRATION = timedelta(seconds=10) 69 | 70 | 71 | def memoize_for_call_stack(function): 72 | """Memoize outputs, but only within the callstack's context. 73 | 74 | This will leave the last values in the cache potentially, 75 | so there's an implicit assumption that this is getting called 76 | frequently. 77 | """ 78 | @wraps(function) 79 | def memoized_call(*args, **kwargs): 80 | context = root_stack_frame() 81 | 82 | now = datetime.now() 83 | 84 | # check for any expired entries first 85 | expired = [] 86 | for c_key, t_out in memoized_call.timeout.items(): 87 | if now - t_out > CALLSTACK_CACHE_EXPIRATION: 88 | expired.append(c_key) 89 | # ... and cull them 90 | for c_key in expired: 91 | del memoized_call.cache[c_key] 92 | del memoized_call.timeout[c_key] 93 | 94 | # init the cache for this callstack 95 | if context not in memoized_call.cache: 96 | memoized_call.cache[context] = {} 97 | memoized_call.timeout[context] = now 98 | 99 | # in the event something unhashable was sent, this'll failsafe 100 | key_state = 0 101 | try: 102 | memo_key = arg_hash(args, kwargs) 103 | if memo_key not in memoized_call.cache[context]: 104 | key_state = 1 105 | else: 106 | key_state = 2 107 | except TypeError: 108 | key_state = 0 109 | 110 | # failsafe or cache the result if needed... 111 | if key_state == 0: 112 | # failsafe and simply run the function 113 | return function(*args, **kwargs) 114 | elif key_state == 1: 115 | # cache the fresh entry 116 | memoized_call.cache[context][memo_key] = function(*args, **kwargs) 117 | # ... 
and return the result 118 | return memoized_call.cache[context][memo_key] 119 | 120 | memoized_call.cache = {} 121 | memoized_call.timeout = {} 122 | 123 | return memoized_call 124 | 125 | 126 | 127 | 128 | def _run_example(): 129 | from time import sleep 130 | 131 | @memoize_for_call_stack 132 | def do_thing(x, y=3): 133 | print '<< direct call: ', x, y 134 | return x + y 135 | 136 | print '=== no defaults ===' 137 | do_thing(10) 138 | do_thing(10) 139 | print '=== all args ===' 140 | do_thing(10,6) 141 | do_thing(10,6) 142 | print '=== args kwargs ===' 143 | do_thing(10, y=34) 144 | do_thing(10, y=34) 145 | print '=== all kwargs ===' 146 | do_thing(x=2, y=10) 147 | do_thing(x=2, y=10) 148 | 149 | print 'testing timeout' 150 | sleep(11) 151 | print 'after timeout' 152 | do_thing(10) 153 | print 'testing failsafe' 154 | do_thing([1,2],[3,4]) 155 | -------------------------------------------------------------------------------- /shared/tools/net.py: -------------------------------------------------------------------------------- 1 | """ 2 | A few simple quick wrappers for commonly asked network questions 3 | """ 4 | 5 | import socket 6 | import telnetlib 7 | import uuid 8 | from time import sleep 9 | 10 | 11 | LOCALHOST_HOSTNAME = '' 12 | LOCALHOST_IP = '127.0.0.1' 13 | LOCALHOST_DNS = 'localhost' 14 | 15 | 16 | def default_hostname(): 17 | """Returns the default hostname for the JVM's host.""" 18 | try: 19 | return socket.gethostname() 20 | except: 21 | return LOCALHOST_HOSTNAME 22 | 23 | 24 | def default_ip(): 25 | """Returns the default IP address of the JVM's host.""" 26 | try: 27 | return socket.gethostbyname( 28 | socket.gethostname() 29 | ) 30 | except: 31 | return LOCALHOST_IP 32 | 33 | 34 | def default_dns_name(): 35 | """Returns the/a default network name of the JVM's host.""" 36 | try: 37 | return socket.gethostbyaddr( 38 | socket.gethostbyname( 39 | default_hostname() 40 | ) 41 | )[0] 42 | except: 43 | return LOCALHOST_DNS 44 | 45 | 46 | def 
def gateway_name():
	"""Returns the gateway's configured name"""
	return str(system.tag.read('[System]Gateway/SystemName').value)


def is_reachable(hostname, port):
	"""True if a TCP connection to hostname:port could be opened from this JVM."""
	return bool(poke(hostname, port))


def poke(hostname, port, timeout=1.0, from_hostname='', from_port=0):
	"""From THIS JVM check if the hostname:port is reachable.

	Fairly naive "throw a 'GET /' at it approach, but even an error helps.

	If returns True, the connection can _potentially_ be established.
	If it returns False, the connection _probably_ doesn't work.

	Note that firewalls, routing, and all manner of odd edge cases can
	make this confusing to interpret; this function merely tries "can TCP work *AT ALL*?"
	"""
	try:
		sock = socket.create_connection(
			address=(hostname, port),
			timeout=timeout,
			source_address=(from_hostname, from_port)
		)
		_ = sock.send('GET /')
	except:
		# any failure at all (DNS, refusal, timeout) -> "probably not reachable"
		return None
	finally:
		# best-effort teardown; sock may be unbound if create_connection raised
		try:
			sock.shutdown(socket.SHUT_RDWR)
			sock.close()
		except:
			pass
	return True


def normalize_ports(ports):
	"""Coerce a port spec into a tuple of ints.

	Accepts a single int, a numeric string ('8088'), a comma-separated
	string ('80, 443'), or any iterable of ints.

	FIXED: the comma branch used to iterate the *characters* of the string
	(for port in ports), so '80, 443' crashed on int('8').
	"""
	if isinstance(ports, int):
		return (ports,)
	if isinstance(ports, str):
		if ',' in ports:
			return tuple(int(part.strip()) for part in ports.split(','))
		return (int(ports.strip()),)
	return tuple(ports)


def poke_from_gateway(hostname, ports=tuple(), timeout=1.0,
					  source_tag_provider='default',
					  poke_module='shared.tools.net', ):
	"""Ask what the gateway can see. We do this by forcing gateway JVM context via tag script.

	Returns a dict of ports and True/False for each poke.
	"""

	assert isinstance(hostname, str), 'hostname is a string, like "127.0.0.1" or "localhost"'

	# unique scratch folder name so concurrent tests can't collide
	random_test_id = str(uuid.uuid4())

	ports = normalize_ports(ports)

	assert all(isinstance(port, int) for port in ports), 'Ports must be numeric, like 80 or "80, 443"'

	added_folders = []

	# build (and remember) the temporary tag folder hierarchy
	tag_provider = '[%s]' % (source_tag_provider,)
	temp_tag_folder = 'TEMP/port-tests/%s' % (random_test_id,)
	base_path_parts = []
	for name in temp_tag_folder.split('/'):
		target_base_path = tag_provider + '/'.join(base_path_parts)
		target_folder_path = tag_provider + '/'.join(base_path_parts + [name])
		if not system.tag.exists(target_folder_path):
			system.tag.configure(target_base_path, [{'tagType': 'Folder', 'name': name}])
			added_folders.append(target_folder_path)

		base_path_parts.append(name)

	temp_tag_folder = tag_provider + '/'.join(base_path_parts)

	# one memory tag per port; its valueChanged script runs in the gateway
	# JVM and writes the poke result back onto the tag itself
	tag_configs = []
	tag_paths = []
	for port in ports:
		tag_name = 'port-%d' % (port,)
		tag_path = '%s/%s' % (temp_tag_folder, tag_name)
		tag_paths.append(tag_path)

		tag_config = {
			'dataType': 'Boolean',
			'name': tag_name,
			'tagType': 'AtomicTag',
			'valueSource': 'memory',
			'eventScripts': [{
				'eventid': 'valueChanged',
				'script': """
	from %(poke_module)s import poke

	result = poke(%(hostname)r, %(port)r, %(timeout)r)

	system.tag.writeBlocking([tagPath], [result])
""" % locals(),
			}],
		}

		tag_configs.append(tag_config)

	system.tag.configure(temp_tag_folder, tag_configs)

	# give the gateway-side scripts a moment to fire and write back
	sleep(timeout + 0.5)

	results = [tvq.value for tvq in system.tag.readAll(tag_paths)]

	# clean up mess (children before parents)
	for folder_path in reversed(sorted(added_folders)):
		system.tag.deleteTags([folder_path])

	return dict(
		(port, result)
		for port, result
		in zip(ports, results)
	)

#poke_from_gateway('127.0.0.1', [80, 443, 8088, 8089, 8043, 8060])


# ======== shared/tools/overwatch.py ========
"""
	Watch a program as it runs. Even after its definition...
"""


import sys


__copyright__ = """Copyright (C) 2020 Corso Systems"""
__license__ = 'Apache 2.0'
__maintainer__ = 'Andrew Geiger'
__email__ = 'andrew.geiger@corsosystems.com'


NOP = lambda x: None


try:
	sys_current_trace = sys.gettrace
except AttributeError:
	# some runtimes lack sys.gettrace; fall back to a do-nothing probe
	sys_current_trace = NOP


def isPerspectiveDesigner():
	"""Best-effort check for running inside a Perspective Designer session.

	NOTE(review): relies on a `self` injected into scope by the Perspective
	scripting environment; anywhere else the NameError is swallowed and
	this simply returns False.
	"""
	try:
		return self.session.props.device.type == 'Designer'
	except:
		return False


class MetaOverwatch(type):
	"""Collects which _<event> callbacks a new subclass defines into its
	`_configured_events` set, merging with any inherited configuration."""

	def __new__(cls, clsname, bases, attrs):
		# find the event vocabulary from the first base that declares one
		# FIXED: getattr without a default raised a raw AttributeError here
		# before the intended error message below could ever fire
		for base in bases:
			event_labels = getattr(base, '_event_labels', None)
			if event_labels:
				event_callbacks = set(['_%s' % el for el in event_labels])
				break
		else:
			raise AttributeError('Base class(es) missing _event_map! This is needed to resolve what is needed.')

		# inherit any events already configured on a base
		# FIXED: previously unbound if no base carried the attribute
		configured_events = set()
		for base in bases:
			inherited = set(getattr(base, '_configured_events', set()))
			if inherited:
				configured_events = inherited
				break

		for attr in attrs:
			if attr in event_callbacks:
				configured_events.add(attr)
		attrs['_configured_events'] = configured_events

		newclass = super(MetaOverwatch, cls).__new__(cls, clsname, bases, attrs)
		return newclass


class BlindOverwatch(object):
	"""Template class that sets the basis for the rest."""
	_callback_function = NOP
	_callback_current = NOP

	_configured_events = set()

	_event_labels = set(['call', 'line', 'return', 'exception', 'c_call', 'c_return', 'c_exception'])

	def dispatch(self, frame, event, arg):
		self._callback_function(None)

	# # local trace funtions
	def _nop(self, _1=None, _2=None):
		pass

	def _call(self, frame, _=None):
		pass
	def _line(self, frame, _=None):
		pass
	def _return(self, frame, return_value):
		pass
	def _exception(self, frame, exc_info):
		# exc_info is the (exception, value, traceback) triple.
		# FIXED: was a Py2-only tuple-unpacking parameter; flattened so the
		# module also parses on Python 3 (call sites pass one tuple either way).
		pass

	def _c_call(self, frame, _=None):
		pass
	def _c_return(self, frame, return_value):
		pass
	def _c_exception(self, frame, exc_info):
		# exc_info is the (exception, value, traceback) triple (see above)
		pass


class Overwatch(BlindOverwatch):
	__metaclass__ = MetaOverwatch
	__slots__ = ('_previous_callback', '_cb_retval')

	_callback_function = sys.settrace
	_callback_current = sys_current_trace

	def __init__(self, replaceExisting=False, debugDesignerOnly=True):
		if debugDesignerOnly and not isPerspectiveDesigner():
			return

		# Buffer any current callbacks, if desired
		if replaceExisting or sys_current_trace is NOP:
			self._previous_callback = None
		else:
			self._previous_callback = self._callback_current()

		# remove the leading underscore and map it to the event
		self._callbacks = dict((event[1:], getattr(self, event))
							   for event in self._configured_events)

		self._callback_function(self.dispatch)


	def dispatch(self, frame, event, arg):
		# chain through to whatever trace function we displaced
		if self._previous_callback:
			self._previous_callback = self._previous_callback(frame, event, arg)

		self._cb_retval = self._callbacks.get(event, None)
		if self._cb_retval:
			# a callback that returns None retires itself
			if self._cb_retval(frame, arg) is None:
				del self._callbacks[event]

		if self._callbacks:
			return self.dispatch
		else:
			# nothing left to watch: hand tracing back to the previous owner
			self._callback_function(self._previous_callback)


# ======== shared/tools/profile.py ========
"""
	Profiling helper functions

	Timeit is built into Python, but its incantation is just a little unobvious.
	But there's a lot of win to be had just _testing_ what's faster.
	Jython is not CPython, and has rather different strengths and weaknesses.

	So when in doubt: test! See how long something really takes!
"""
11 | """ 12 | 13 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 14 | __license__ = 'Apache 2.0' 15 | __maintainer__ = 'Andrew Geiger' 16 | __email__ = 'andrew.geiger@corsosystems.com' 17 | 18 | 19 | import timeit 20 | import math 21 | from textwrap import dedent 22 | import tempfile 23 | import profile, pstats 24 | import sys, os 25 | 26 | 27 | 28 | statement_time = lambda statement, setup, iterations: timeit.Timer(statement, setup).repeat(iterations) 29 | 30 | 31 | 32 | def number_to_thousands_separated_string(number, sep=','): 33 | chunks = [] 34 | numstr = str(int(number)) 35 | chunk = '' 36 | for i,c in enumerate(numstr[::-1]): 37 | chunk += c 38 | if (i+1) % 3 == 0: 39 | chunks.append(chunk[::-1]) 40 | chunk = '' 41 | if chunk: 42 | chunks.append(chunk[::-1]) 43 | return sep.join(chunks[::-1]) 44 | 45 | 46 | 47 | def convert_to_human_readable(number, unit='s'): 48 | scale = 1 49 | vstr = str(number) 50 | if number < 0.000001: 51 | vstr = '%0.3f' % round(number * 1000.0 * 1000.0 * 1000.0, 3) 52 | scale = 'n' 53 | elif number < 0.001: 54 | vstr = '%0.3f' % round(number * 1000.0 * 1000.0, 3) 55 | scale = 'u' 56 | elif number < 1.0: 57 | vstr = '%0.3f' % round(number * 1000.0, 3) 58 | scale = 'm' 59 | else: 60 | n,d = divmod(number, 1) 61 | if not n: 62 | vstr = '%0.3f' % round(number, 3) 63 | else: 64 | vstr = number_to_thousands_separated_string(n) 65 | vstr += '%0.3f' % round(d, 3) 66 | scale = '' 67 | 68 | return '%s %s%s' % (vstr, scale, unit) 69 | 70 | 71 | 72 | def profile_script(python_script, context_globals=None, context_locals=None): 73 | profiler = profile.Profile() 74 | 75 | mutable_global_context = (context_globals or {}).copy() 76 | mutable_locals_context = (context_locals or {}).copy() 77 | 78 | profiler.runctx( 79 | python_script, 80 | mutable_global_context, 81 | mutable_locals_context, 82 | ) 83 | 84 | return profiler, (mutable_global_context, mutable_locals_context) 85 | 86 | 87 | def profile_call(function, *args, **kwargs): 88 | 
"""Calls the function""" 89 | profiler = profile.Profile() 90 | 91 | # do not use runctx with a compiled function - it will NPE 92 | # (no obvious reason, it breaks on `sys.setprofile(None)` which should be safe) 93 | results = profiler.runcall(function, *args, **kwargs) 94 | 95 | return profiler, results 96 | 97 | 98 | def log_profile(profiler, results, log_target=None, sorting='tottime'): 99 | """Log the results given a logging context. 100 | When log_target is: 101 | - None - prints to standard out 102 | - includes path separators - dumps raw stats to disk 103 | - is a normal string - prints to logger 104 | """ 105 | if log_target is None: 106 | # print the results 107 | profiler_stats = pstats.Stats(profiler) 108 | profiler_stats.sort_stats(sorting).print_stats() 109 | elif any(['/' in log_target, '\\' in log_target]): 110 | profiler_stats = pstats.Stats(profiler) 111 | profiler_stats.sort_stats(sorting).dump_stats(log_target) 112 | else: 113 | from StringIO import StringIO 114 | prof_stats_output = StringIO() 115 | profiler_stats = pstats.Stats(profiler, stream=prof_stats_output) 116 | profiler_stats.sort_stats(sorting).print_stats() 117 | prof_stats_output.seek(0) 118 | system.util.getLogger(log_target).info(prof_stats_output.read()) 119 | 120 | return results 121 | 122 | 123 | def time_it(statement_to_time='pass', setup_statement='pass', iterations=100, setup_executions=3): 124 | """Time the given statement iterations number of times. 125 | If provided, a setup statement can be used to avoid timing setup. 126 | The full test/setup will be repeated setup_executions times. 
127 | """ 128 | results = { 129 | 'setup repeated': setup_executions, 130 | 'iterations per setup': iterations, 131 | } 132 | 133 | # bail on nothing to do =/ 134 | if not all([setup_executions, iterations]): 135 | return results 136 | 137 | # create the timer 138 | timer = timeit.Timer(statement_to_time, setup_statement) 139 | 140 | # run the test(s) 141 | times = timer.repeat(setup_executions, iterations) 142 | 143 | # calculate some statistics 144 | values = times 145 | 146 | total = math.fsum(values) 147 | mu = total / len(values) 148 | results['round avg'] = mu 149 | results['statement avg'] = mu/iterations 150 | 151 | if setup_executions < 2: 152 | results['round std dev'] = None 153 | else: 154 | v = math.fsum(pow(x-mu,2) for x in values) / (len(values) - 1) 155 | sd = math.sqrt(v) 156 | results['round std dev'] = sd 157 | results['est statement std dev'] = sd/iterations 158 | 159 | return results 160 | 161 | 162 | 163 | def print_time_it(statement_to_time='pass', setup_statement='pass', iterations=100, setup_executions=3, direct_print=True, include_source=False): 164 | 165 | results = time_it(statement_to_time, setup_statement, iterations, setup_executions) 166 | 167 | for key,value in results.items(): 168 | if value is None: 169 | results[key] = '' 170 | elif key.endswith('std dev'): 171 | results[key] = ' (±%s)' % convert_to_human_readable(value) 172 | else: 173 | results[key] = convert_to_human_readable(value) 174 | 175 | 176 | report = dedent(""" 177 | For %(iterations)d iterations across %(setup_executions)s setups: 178 | %(statement avg)s%(est statement std dev)s per statement 179 | %(round avg)s%(round std dev)s total per round 180 | """ % dict(results.items() + locals().items())) 181 | 182 | if include_source and isinstance(setup_statement, str): 183 | # if setup_statement.count('\n') > 5: 184 | # setup_statement = '\n'.join(setup_statement.splitlines()[:5] + ['...']) 185 | report += dedent(""" 186 | Setup: 187 | %s 188 | """ % setup_statement) 189 
| 190 | if include_source and isinstance(statement_to_time, str): 191 | # if statement_to_time.count('\n') > 3: 192 | # statement_to_time = '\n'.join(statement_to_time.splitlines()[:3] + ['...']) 193 | report += dedent(""" 194 | Statement: 195 | %s 196 | """ % statement_to_time) 197 | 198 | if direct_print: 199 | print report 200 | else: 201 | return report 202 | -------------------------------------------------------------------------------- /shared/tools/runtime.py: -------------------------------------------------------------------------------- 1 | """ 2 | Classes to make adding things to the Ignition runtime easier 3 | 4 | """ 5 | from uuid import UUID 6 | 7 | 8 | __all__ = ['RuntimeAddition'] 9 | 10 | 11 | def nop(*args, **kwargs): 12 | pass 13 | 14 | 15 | class RuntimeAddition(object): 16 | _cls_static_uuid = None 17 | 18 | _EVENT_NAME_TYPES = 'mouse action'.split() 19 | 20 | def __init__(self, configuration=None): 21 | super(RuntimeAddition, self).__init__() 22 | self._config = configuration or {} 23 | 24 | for possible_attribute, value in self._config.items(): 25 | if not possible_attribute in self._EVENT_NAME_TYPES: 26 | try: 27 | getattr(self, 'set' + possible_attribute.capitalize())(value) 28 | except AttributeError: 29 | pass 30 | 31 | # Generalized for simple convenience 32 | # Don't configure mouse events if the class combined with RuntimeAddition 33 | # doesn't support mouse events. 34 | # That would be silly. 35 | # This covers the swing components in general. 
# NOTE(review): the methods preceding this point (`_call_or_iterate_calls`,
# `isInstance`) belong to a listener mixin class whose header lies outside this
# chunk; they are not reproduced here.

from java.awt.event import MouseAdapter, MouseEvent, MouseWheelEvent


class MouseReaction(RuntimeAddition, MouseAdapter):
    """AWT mouse adapter that routes each Swing mouse event to the callables
    configured under the matching key of `self._config` (via the mixin's
    `_call_or_iterate_calls`)."""

    # Hard-coded identity marker so `isInstance` checks survive Jython class reloads.
    _cls_static_uuid = UUID('51ad3eb6-737a-4cfb-96ac-fc29f2cb10b5')

    def mouseClicked(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('clicked', mouse_event)

    def mouseDragged(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('dragged', mouse_event)

    def mouseEntered(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('entered', mouse_event)

    def mouseExited(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('exited', mouse_event)

    def mouseMoved(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('moved', mouse_event)

    def mousePressed(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('pressed', mouse_event)

    def mouseReleased(self, mouse_event):
        assert isinstance(mouse_event, MouseEvent)
        self._call_or_iterate_calls('released', mouse_event)

    def mouseWheelMoved(self, wheel_event):
        # Wheel events are a distinct AWT event type from plain mouse events.
        assert isinstance(wheel_event, MouseWheelEvent)
        self._call_or_iterate_calls('wheel', wheel_event)


from java.awt.event import ActionListener, ActionEvent

class ActionReaction(RuntimeAddition, ActionListener):
    """AWT action listener routing `actionPerformed` to configured callables."""

    _cls_static_uuid = UUID('c6f3836e-aa92-4489-b796-4f5834adbcc0')

    def actionPerformed(self, action_event):
        assert isinstance(action_event, ActionEvent)
        self._call_or_iterate_calls('performed', action_event)


# --- shared/tools/sidecar.py ---

from shared.tools.thread import async
from shared.tools.global import ExtraGlobal

import BaseHTTPServer
from cgi import escape
import urlparse
import urllib


class SimpleServer(BaseHTTPServer.HTTPServer):
    """Single-threaded HTTP server whose errors go to the Ignition logger."""

    # Permit quick restarts on the same port (SO_REUSEADDR).
    allow_reuse_address = True

    def handle_error(self, request, client_address):
        # Log instead of letting BaseHTTPServer dump a traceback to stderr.
        system.util.getLogger('Sidecar').error('Error with %r: %r to [%r]' %(self, request, client_address))
        # request.send_response(400)
        # request.send_header("Content-type", "text/html")
        # request.end_headers()
        # request.wfile.write('Error executing %(command)s for %(client_address)s' % request)
return escape(some_string.decode('utf8')) 32 | 33 | def __getitem__(self, attribute): 34 | """Make this dict-like to simplify things a bit.""" 35 | try: 36 | return getattr(self, attribute) 37 | except AttributeError: 38 | raise KeyError('%s is not available for this handler') 39 | 40 | @property 41 | def fields(self): 42 | try: 43 | length = int(self.headers.getheader('content-length')) 44 | field_data = self.rfile.read(length) 45 | return urlparse.parse_qs(field_data) 46 | except: 47 | return None 48 | 49 | @property 50 | def params(self): 51 | query_string = urlparse.urlsplit(self.path)[3] 52 | try: 53 | paramdict = urlparse.parse_qs(query_string) 54 | return dict( ( key, 55 | value[0] if isinstance(value, list) and len(value) == 1 else value 56 | ) 57 | for key, value 58 | in paramdict.items()) 59 | 60 | except: 61 | params = {} 62 | for entry in query_string.split('&'): 63 | key,_,value = entry.partition('=') 64 | params[urllib.unquote(key)] = urllib.unquote(value) 65 | return params 66 | 67 | 68 | def shutdown(port): 69 | session = ExtraGlobal.setdefault(port, 'Sidecar', {}) 70 | session['shutdown'] = True 71 | 72 | 73 | @async(name='Sidecar-REST') 74 | def launch_sidecar(port, RestHandler, hostname='localhost', resume_session=True, session_timeout=600): 75 | """ 76 | This assumes that keep_running() is a function of no arguments which 77 | is tested initially and after each request. If its return value 78 | is true, the server continues. 
79 | """ 80 | system.util.getLogger('Sidecar').info("Launching sidecar on port %r with %r" % (port, RestHandler)) 81 | 82 | if resume_session: 83 | session = ExtraGlobal.setdefault(port, 'Sidecar', {}, lifespan=session_timeout) 84 | else: 85 | ExtraGlobal.stash({}, port, 'Sidecar', lifespan=session_timeout) 86 | session = ExtraGlobal.access(port, 'Sidecar') 87 | 88 | server_address = (hostname, port) 89 | httpd = SimpleServer(server_address, RestHandler) 90 | try: 91 | system.util.getLogger('Sidecar').info('Sidecar started at http://%s:%r' % (hostname, port,)) 92 | 93 | while not ExtraGlobal.setdefault(port, 'Sidecar', {}, lifespan=session_timeout).get('shutdown', False): 94 | httpd.handle_request() 95 | except Exception, error: 96 | system.util.getLogger('Sidecar').info("Exception on port %r: %s %r" % (port,type(error), error)) 97 | 98 | except: 99 | pass 100 | finally: 101 | #print 'Shutting down %r' % (httpd.server_address,) 102 | httpd.server_close() 103 | ExtraGlobal.trash(port, 'Sidecar') # clear session 104 | #print '... done!' 105 | system.util.getLogger('Sidecar').info("Sidecar torn down from port %r" % (port,)) 106 | -------------------------------------------------------------------------------- /shared/tools/snapshot/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/tools/snapshot/__init__.py -------------------------------------------------------------------------------- /shared/tools/snapshot/engine.py: -------------------------------------------------------------------------------- 1 | """ 2 | Snapshot versioning of Ignition resources 3 | 4 | Take stuff in Ignition and put it to disk using words. 5 | Then let other programs read those words and tell you what's different! 6 | 7 | This is a one-way utility: it dumps TO disk, FROM Ignition. 
8 | It could go the other way too in _some_ circumstances, but that's not the goal. 9 | The goal here is to make it possible to comprehensively and confidently know 10 | what is different between two projects (or one project's differences over time). 11 | And with enough cleverness, you can inspect history and compare many projects! 12 | """ 13 | 14 | from __future__ import with_statement 15 | import os, shutil, re 16 | 17 | from shared.tools.snapshot.utils import getDesignerContext 18 | 19 | 20 | # Load in extractors 21 | RESOURCE_EXTRACTORS = { 22 | '__folder': None, 23 | } 24 | 25 | BULK_GLOBAL_EXTRACTORS = [] 26 | BULK_PROJECT_EXTRACTORS = [] 27 | 28 | _EXTRACTORS = [ 29 | 'shared.tools.snapshot.ia.global', 30 | 'shared.tools.snapshot.ia.project', 31 | # 'shared.tools.snapshot.ia.reporting', 32 | 'shared.tools.snapshot.ia.tags', 33 | 'shared.tools.snapshot.ia.vision', 34 | 'shared.tools.snapshot.ia.webdev', 35 | 'shared.tools.snapshot.sepasoft.webservices', 36 | 'shared.tools.snapshot.sepasoft.model', 37 | ] 38 | 39 | _HOTLOADING_SCOPE = 'bootstrap.' 40 | 41 | for module_path in _EXTRACTORS: 42 | try: 43 | assert module_path.startswith(_HOTLOADING_SCOPE), "Extractors are expected to be hot loaded from the `%s` scripts." % _HOTLOADING_SCOPE 44 | module = reduce(getattr, module_path.split('.')[1:], shared) 45 | 46 | RESOURCE_EXTRACTORS.update(getattr(module, 'EXTRACTORS', {})) 47 | BULK_GLOBAL_EXTRACTORS += getattr(module, 'BULK_GLOBAL_EXTRACTORS', []) 48 | BULK_PROJECT_EXTRACTORS += getattr(module, 'BULK_PROJECT_EXTRACTORS', []) 49 | except: 50 | pass 51 | 52 | 53 | def nop_dict(*args, **kwargs): 54 | return {} 55 | 56 | 57 | def extract_resources(resources, category='', context=None): 58 | """Extract resource data. 
Category prepends to each resource's path""" 59 | if context is None: 60 | context = getDesignerContext() 61 | 62 | deserializer = context.createDeserializer() 63 | 64 | extracted_data = {} 65 | 66 | for res_path, resource in resources.items(): 67 | res_type = resource.getResourceType() 68 | extractor = RESOURCE_EXTRACTORS.get(res_type, None) 69 | 70 | if not extractor: 71 | #print 'No extractor for %s' % res_type 72 | continue 73 | 74 | try: 75 | data_context = deserializer.deserializeBinary(resource.getData()) 76 | except SerializationException, error: 77 | print 'Resource did not deserialize: %s\n%r (type: %s)' % (res_path, resource, res_type) 78 | print ' Err: %r' % error 79 | 80 | resource_objects = [obj for obj in data_context.getRootObjects()] 81 | 82 | dest_path, _, _ = res_path.rpartition('/') 83 | 84 | try: 85 | res_name = resource.getName() 86 | if res_name: 87 | dest_path += '/' + res_name 88 | except: 89 | pass 90 | 91 | if category: 92 | dest_path = category + '/' + dest_path 93 | 94 | # Gather any extra bits of context if the extractor needs it 95 | # (Skip the first, since it will always be resource_objects) 96 | keyword_arguments = {} 97 | num_extra_args = extractor.func_code.co_argcount-1 98 | if num_extra_args: 99 | for kwarg in extractor.func_code.co_varnames[1:][:num_extra_args]: 100 | keyword_arguments[kwarg] = locals()[kwarg] 101 | extracted_data[dest_path] = extractor(resource_objects, **keyword_arguments) 102 | 103 | return extracted_data 104 | 105 | 106 | def dump_extracted_resources(destination_folder, extracted_data, purge_first=False): 107 | """ 108 | Dump the contents of the given extracted data into the destination folder. 109 | If purge_first is set True, then the destination will be deleted before dumping. 
110 | """ 111 | if purge_first and os.path.exists(destination_folder): 112 | for subdir in os.listdir(destination_folder): 113 | if subdir.startswith('.'): 114 | continue 115 | 116 | try: 117 | shutil.rmtree(destination_folder + '/' + subdir) 118 | except OSError: 119 | print 'Destination folder not completely purged - check for open files!' 120 | 121 | for resource_path, resource_details in extracted_data.items(): 122 | resource_path, _, name = resource_path.rpartition('/') 123 | 124 | destination = '%s/%s' % (destination_folder, resource_path) 125 | 126 | for suffix, data in resource_details.items(): 127 | 128 | if suffix.startswith('.'): 129 | filepath = '%s/%s%s' % (destination, name, suffix) 130 | else: 131 | filepath = '%s/%s' % (destination, suffix) 132 | 133 | if data is None: 134 | print 'No data! %s' % filepath 135 | continue 136 | 137 | 138 | if not os.path.exists(filepath.rpartition('/')[0]): 139 | os.makedirs(filepath.rpartition('/')[0]) 140 | 141 | with open(filepath, 'wb') as f: 142 | f.write(data) 143 | 144 | 145 | def coredump(destination_folder): 146 | #destination_folder = 'C:/Workspace/temp/extraction-2' 147 | 148 | context = getDesignerContext() 149 | 150 | global_project = context.getGlobalProject().getProject() 151 | designer_project = context.getProject() 152 | 153 | global_resources = dict( 154 | ('%s/%s' % (resource.getResourceType(), global_project.getFolderPath(resource.getResourceId())) or '', resource) 155 | for resource 156 | in global_project.getResources() 157 | ) 158 | 159 | project_resources = dict( 160 | ('%s/%s' % (resource.getResourceType(), designer_project.getFolderPath(resource.getResourceId())) or '', resource) 161 | for resource 162 | in designer_project.getResources() 163 | ) 164 | 165 | extracted_resources = {} 166 | 167 | extracted_resources['project/properties'] = RESOURCE_EXTRACTORS['project/properties'](context) 168 | 169 | extracted_resources.update(extract_resources(global_resources, 'global')) 170 | 
extracted_resources.update(extract_resources(project_resources, 'project')) 171 | 172 | for bulk_extractor in BULK_GLOBAL_EXTRACTORS: 173 | extracted_resources.update(bulk_extractor(global_project)) 174 | 175 | for bulk_extractor in BULK_PROJECT_EXTRACTORS: 176 | extracted_resources.update(bulk_extractor(designer_project)) 177 | 178 | dump_extracted_resources(destination_folder, extracted_resources, purge_first=True) 179 | 180 | -------------------------------------------------------------------------------- /shared/tools/snapshot/ia/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/tools/snapshot/ia/__init__.py -------------------------------------------------------------------------------- /shared/tools/snapshot/ia/global.py: -------------------------------------------------------------------------------- 1 | """ 2 | Global resources 3 | 4 | Ignition before 8 always has a sort-of meta project called global that all other 5 | projects can use and reference. 
"""
Global resources

Ignition before 8 always has a sort-of meta project called global that all other
projects can use and reference.
"""

from shared.tools.snapshot.utils import encode, propsetToDict


def extract_global_script(resource_objects):
    """Return the shared script resource as a single '.py' payload."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    source_code = resource_objects[0]

    payload = {}
    payload['.py'] = source_code
    return payload


def extract_alarmpipeline(resource_objects):
    """Serialize an alarm pipeline's full property tree for diffing."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    pipeline_config = propsetToDict(resource_objects[0], recurse=True)

    suffix, serialized = encode(pipeline_config)
    return {suffix: serialized}


# Ready for the dispatcher
EXTRACTORS = {
    'sr.script.shared': extract_global_script,
    'alarm-pipeline': extract_alarmpipeline,
}
from shared.tools.snapshot.utils import encode, hashmapToDict
# NOTE(review): `re` and `format_sql` are used below but not imported in the
# visible header — confirm they are injected by the hotloading scope.


def extract_project_props(client_context):
    """Collect project-level properties (permissions, role requirements,
    auditing flags, defaultable names) into one encoded document."""

    global_props = client_context.getGlobalProps()

    configuration = {
        'permissions': hashmapToDict(global_props.getPermissionEnabledMap()),
        'roles': {
            # Client role map values are comma-separated strings; split and trim.
            'client': dict((category, [role.strip()
                                       for role
                                       in role_string.split(',')
                                       if role
                                      ])
                           for category, role_string
                           in hashmapToDict(
                               global_props.getRequiredClientRolesMap()
                           ).items()),
            'delete'  : [role.strip() for role in global_props.getRequiredDeleteRoles()],
            'publish' : [role.strip() for role in global_props.getRequiredPublishRoles()],
            'resource': [role.strip() for role in global_props.getRequiredResourceRoles()],
            'required': [role.strip() for role in global_props.getRequiredRoles()],
            'save'    : [role.strip() for role in global_props.getRequiredSaveRoles()],
            'view'    : [role.strip() for role in global_props.getRequiredViewRoles()],
        },
        'auditing': global_props.isAuditingEnabled(),
        'legacy': global_props.isLegacyProject(),
        'commitMessageMode': global_props.getCommitMessageMode().toString(),  # enum
        'defaultSQLTagsProviderRate': global_props.getSqltagsClientPollRate(),
    }

    # These may be unset; only record them when a value is present.
    defaultable_attributes = set([
        'auditProfileName',
        'authProfileName',
        'defaultDatasourceName',
        'defaultSQLTagsProviderName',
        'publishMode',
    ])

    for attribute in defaultable_attributes:
        try:  # to get the Java getter first
            # it's slightly more reliable than the Jython auto-attribute, in general
            getter_name = 'get' + attribute[0].upper() + attribute[1:]
            value = getattr(global_props, getter_name)()
        except AttributeError:
            try:  # the Jython attribute
                value = getattr(global_props, attribute)
            except AttributeError:
                value = None

        if value is None:
            continue

        configuration[attribute] = value

    return dict([
        encode(configuration),
    ])


def extract_gatewayevents(resource_objects):
    """Extract gateway event scripts (startup/shutdown/timer/tag-change/message)
    as a dict of relative file paths -> file contents."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    client_script_config = resource_objects[0]

    scripts = {}

    script = client_script_config.getStartupScript()
    if script:
        scripts['startup.py'] = script

    script = client_script_config.getShutdownScript()
    if script:
        scripts['shutdown.py'] = script

    timer_scripts = client_script_config.getTimerScripts()
    for timer_script in timer_scripts:
        # Config goes to a sidecar file (suffix from encode); code to a .py.
        suffix, serialized = encode({
            'enabled': timer_script.isEnabled(),
            'timing': 'delay' if timer_script.isFixedDelay() else 'rate',
            'period': timer_script.getDelay(),
            'threading': 'shared' if timer_script.isSharedThread() else 'dedicated',
        })
        scripts['timer/%s%s' % (timer_script.getName(), suffix)] = serialized
        scripts['timer/%s.py' % timer_script.getName()] = timer_scripts[timer_script]

    for tag_script in client_script_config.getTagChangeScripts():
        suffix, serialized = encode({
            'name': tag_script.getName(),
            'tags': [tag_path for tag_path in tag_script.getPaths()],
            'triggers': [t.toString() for t in tag_script.getChangeTypes()],
            'enabled': tag_script.isEnabled(),
        })
        scripts['tag-change/%s%s' % (tag_script.getName(), suffix)] = serialized
        scripts['tag-change/%s.py' % tag_script.getName()] = tag_script.getScript()

    message_scripts = client_script_config.getMessageHandlerScripts()
    for message_script in message_scripts:
        suffix, serialized = encode({
            'name': message_script.getName(),
            'threading': str(message_script.getThreadType()),
            'enabled': message_script.isEnabled(),
        })
        scripts['message/%s%s' % (message_script.getName(),suffix)] = serialized
        scripts['message/%s.py' % message_script.getName()] = message_scripts[message_script]

    return scripts


def extract_clientevents(resource_objects):
    """Extract client event scripts (startup/shutdown/keystroke/timer/
    tag-change/menu/message) as a dict of relative paths -> contents."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    client_script_config = resource_objects[0]

    scripts = {}

    script = client_script_config.getStartupScript()
    if script:
        scripts['startup.py'] = script

    script = client_script_config.getShutdownScript()
    if script:
        scripts['shutdown.py'] = script

    script = client_script_config.getShutdownAllowedScript()
    if script:
        scripts['shutdown-intercept.py'] = script

    # NOTE(review): the named groups were mangled in transit; reconstructed
    # from the key_config['action'/'key'/'modifiers'] lookups below — confirm
    # against the display format, e.g. "[Ctrl] K (pressed)".
    key_schema_pattern = re.compile("(\[(?P<modifiers>.*)\] )?(?P<key>.*) \((?P<action>.*)\)")
    key_modifier_pattern = re.compile("(Button \d|\w+)")

    key_scripts = client_script_config.getKeyScripts()
    for kix, key_script in enumerate(key_scripts):
        key_config = key_schema_pattern.match(key_script.getDisplay()).groupdict()
        suffix, serialized = encode({
            'action': key_config['action'],
            'key': key_config['key'].replace("'", ''),
            'modifiers': key_modifier_pattern.findall(key_config['modifiers']) if key_config['modifiers'] else []
        })
        scripts['key/%s%s' % (key_script.getDisplay(), suffix)] = serialized
        scripts['key/%s.py' % key_script.getDisplay()] = key_scripts[key_script]

    timer_scripts = client_script_config.getTimerScripts()
    for timer_script in timer_scripts:
        suffix, serialized = encode({
            'enabled': timer_script.isEnabled(),
            'timing': 'delay' if timer_script.isFixedDelay() else 'rate',
            'period': timer_script.getDelay(),
            'threading': 'shared' if timer_script.isSharedThread() else 'dedicated',
        })
        scripts['timer/%s%s' % (timer_script.getName(), suffix)] = serialized
        scripts['timer/%s.py' % timer_script.getName()] = timer_scripts[timer_script]

    for tag_script in client_script_config.getTagChangeScripts():
        suffix, serialized = encode({
            'name': tag_script.getName(),
            'tags': [tag_path for tag_path in tag_script.getPaths()],
            'triggers': [t.toString() for t in tag_script.getChangeTypes()],
            'enabled': tag_script.isEnabled(),
        })
        scripts['tag-change/%s%s' % (tag_script.getName(), suffix)] = serialized
        scripts['tag-change/%s.py' % tag_script.getName()] = tag_script.getScript()

    def traverse_menu(parent_path, menu_node, mutable_dict):
        # Depth-first walk of the custom client menu tree, numbering entries.
        for mix, child in enumerate(menu_node.getChildren() or []):
            suffix, serialized = encode({
                'name': child.getName(),
                'icon': child.getIconPath(),
                'mnemonic': child.getMnemonic(),
                'description': child.getDescription(),
                'accelerator': child.getAccelerator(),
            })
            mutable_dict['%s/entry-%02d%s' % ('/'.join(parent_path), mix, suffix)] = serialized
            mutable_dict['%s/entry-%02d.py' % ('/'.join(parent_path), mix)] = child.getScript()

            traverse_menu(parent_path + [child.getName() or ('Submenu-%02d' % mix)], child, mutable_dict)

    menu_root = client_script_config.getMenuRoot()
    traverse_menu(['menu'], menu_root, scripts)

    message_scripts = client_script_config.getMessageHandlerScripts()
    for message_script in message_scripts:
        suffix, serialized = encode({
            'name': message_script.getName(),
            'threading': str(message_script.getThreadType()),
            'enabled': message_script.isEnabled(),
        })
        scripts['message/%s%s' % (message_script.getName(), suffix)] = serialized
        scripts['message/%s.py' % message_script.getName()] = message_scripts[message_script]

    return scripts


def extract_namedquery(resource_objects):
    """Extract a named query as a formatted .sql file plus an encoded sidecar
    holding database, parameter, and type metadata."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    named_query = resource_objects[0]

    info = {
        'query': named_query.getQuery(),
        'database': named_query.getDatabase() or '-default-',
        'parameters': dict(
            (param.getIdentifier(), {
                'sql_type'  : str(param.getSqlType()),
                'type'      : str(param.getType()),
                'identifier': str(param.getIdentifier()),
            })
            for param
            in named_query.getParameters()
        ),
        'type': named_query.getType(),
    }

    return dict([
        ('.sql', format_sql(info['query'])),
        encode(info),
    ])


def extract_project_script(resource_objects):
    """Return the project script resource as a single '.py' payload."""
    assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object'

    script = resource_objects[0]

    return {
        '.py': script,
    }


# Ready for the dispatcher
EXTRACTORS = {
    'sr.script.project': extract_project_script,
    'named-query': extract_namedquery,
    'client.event.scripts': extract_clientevents,
    'event.scripts': extract_gatewayevents,
    'project/properties': extract_project_props,
}
12 | """ 13 | 14 | from shared.tools.snapshot.utils import getSerializationCauses, serializeToXML 15 | 16 | 17 | def extract_window(resource_objects, deserializer=None): 18 | assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object' 19 | 20 | window_info = resource_objects[0] 21 | 22 | try: 23 | window_context = deserializer.deserializeBinary(window_info.getSerializedCode()) 24 | except SerializationException, error: 25 | return { 26 | '.error': '\n'.join([str(e) for e in getSerializationCauses(error)]) 27 | } 28 | 29 | window = window_context.getRootObjects()[0] 30 | 31 | return { 32 | '.xml': serializeToXML(window) 33 | } 34 | 35 | 36 | def extract_template(resource_objects, deserializer=None): 37 | assert len(resource_objects) == 1, 'Resource is expected to be contained in one root object' 38 | 39 | template_info = resource_objects[0] 40 | 41 | try: 42 | template_context = deserializer.deserializeBinary(template_info.getSerializedBytes()) 43 | except SerializationException, error: 44 | return { 45 | '.error': '\n'.join([str(e) for e in getSerializationCauses(error)]) 46 | } 47 | 48 | template = template_context.getRootObjects()[0] 49 | 50 | return { 51 | '.xml': serializeToXML(template) 52 | } 53 | 54 | 55 | # Ready for the dispatcher 56 | EXTRACTORS = { 57 | 'window': extract_window, 58 | 'component-template': extract_template, 59 | } 60 | -------------------------------------------------------------------------------- /shared/tools/snapshot/sepasoft/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/shared/tools/snapshot/sepasoft/__init__.py -------------------------------------------------------------------------------- /shared/tools/snapshot/sepasoft/model.py: -------------------------------------------------------------------------------- 1 | from shared.tools.snapshot.utils import 
from shared.tools.snapshot.utils import encode, getDesignerContext

from java.lang import Object
from com.sepasoft.production.common.model.storage import ConvertLog
# Placeholder log object required by the Sepasoft conversion API; its
# contents are not inspected afterwards.
CONVERTLOG_PLACEHOLDER = ConvertLog(Object())


# Resource types that make up the Sepasoft production model hierarchy.
MES_TYPES = set(['cell', 'cell_group', 'line', 'area', 'site', 'enterprise'])


def trace_equipment_path(item, resolved_model):
    """Walk parent UUID links up through resolved_model, returning the list
    of item names from the hierarchy root down to `item`."""
    parent = resolved_model.get(item['Parent Production Item UUID'], None)
    if parent:
        return trace_equipment_path(parent, resolved_model) + [item['Name']]
    else:
        return [item['Name']]


def resolve_model_item(resource_objects):
    """Convert a deserialized Sepasoft storage item into a plain dict of its
    name, type, properties, and entry-property tables."""

    storage_item = resource_objects[0]
    # NOTE(review): the first argument appears to be an arbitrary label for
    # the conversion — confirm against the Sepasoft API.
    production_item = storage_item.convertToProductionItem('some string', CONVERTLOG_PLACEHOLDER)

    configuration = {
        'Name': production_item.getName(),
        'Type': production_item.getProductionType(),
    }

    # Only record the flag when disabled (enabled is the default).
    if not production_item.getEnabled():
        configuration['enabled'] = False

    properties = production_item.getProperties()
    property_ids = properties.getPropertyIDs()
    for prop_name in property_ids.keySet():
        prop_id = property_ids[prop_name]
        prop_value = production_item.getPropertyValue(prop_id)
        if prop_value:
            configuration[prop_name] = prop_value

    for entries in production_item.getEntryProperties():
        entry_list = configuration[entries.getDisplayName()] = []

        entry_property_names = [eep.getPropertyName()
                                for eep in entries.getEntryEditProperties()]

        for entry_key in entries.getEntries():
            entry = entries.getEntries()[entry_key]

            # Keep only populated values for this entry row.
            entry_list.append(dict(
                (eep, entry.getPropertyValue(eep))
                for eep in entry_property_names
                if not entry.getPropertyValue(eep) in (None, '')
            ))

    return configuration


def extract_production_model(global_project, category='', context=None):
    """Bulk extractor: resolve every MES resource in the global project,
    reconstruct equipment paths from parent UUIDs, and return a dict of
    destination paths -> encoded configuration payloads."""
    if context is None:
        context = getDesignerContext()

    deserializer = context.createDeserializer()

    # First pass: resolve each MES resource keyed by its own UUID.
    mes_resources = {}
    for resource in global_project.getResources():
        resource_type = resource.getResourceType()
        if not resource_type in MES_TYPES:
            continue

        data_context = deserializer.deserializeBinary(resource.getData())
        resource_objects = [obj for obj in data_context.getRootObjects()]

        model_item_config = resolve_model_item(resource_objects)
        model_item_config['Parent Production Item UUID'] = repr(resource.getParentUuid())
        mes_resources[model_item_config['Production Item UUID']] = model_item_config

    # Second pass: now that all items are resolved, parent chains can be traced.
    extracted_resources = {}
    for pmi_uuid, model_item_config in mes_resources.items():

        equipment_path = '/'.join(trace_equipment_path(model_item_config, mes_resources))
        dest_path = model_item_config['Equipment Path'] = equipment_path
        dest_path = 'production_model' + '/' + dest_path
        if category:
            dest_path = category + '/' + dest_path
        extracted_resources[dest_path] = dict([encode(model_item_config)])

    return extracted_resources


# Ready for the dispatcher
BULK_GLOBAL_EXTRACTORS = [extract_production_model]
# Taken from the Metatools library, copied here for convenience
def getDesignerContext(anchor=None):
    """Attempts to grab the Ignition designer context.
    This is most easily done with a Vision object, like a window.
    If no object is provided as a starting point, it will attempt to
    get one from the designer context.
    """
    from com.inductiveautomation.ignition.designer import IgnitionDesigner

    if anchor is None:

        try:
            return IgnitionDesigner.getFrame().getContext()
        except:
            # Fall back to any open window as an anchor into the tree.
            for windowName in system.gui.getWindowNames():
                try:
                    anchor = system.gui.getWindow(windowName)
                    break
                except:
                    pass
            else:
                raise LookupError("No open windows were found, so no context was derived by default.")

    try:
        anchor = anchor.source
    except AttributeError:
        pass
    # Just making sure we've a live object in the tree, not just an event object

    # Climb at most 50 parents looking for the IgnitionDesigner frame.
    for i in range(50):
        if anchor.parent is None:
            break
        else:
            anchor = anchor.parent

        if isinstance(anchor, IgnitionDesigner):
            break
    else:
        raise RuntimeError("No Designer Context found in this object's heirarchy")

    context = anchor.getContext()
    return context


POORSQL_BINARY_PATH = 'C:/Workspace/bin/SqlFormatter.exe'

# from https://stackoverflow.com/a/165662/13229100
from subprocess import Popen, PIPE, STDOUT

def format_sql(raw_sql):
    """Normalize the SQL so it is consistent for diffing.

    Pipes the text through the Poor Man's T-SQL Formatter binary when it is
    available; on any failure (binary missing, subprocess error) the input
    is returned unchanged.
    """
    try:
        # BUG FIX: a leftover `raise KeyboardInterrupt` here short-circuited
        # into the bare except, so this function always returned raw_sql
        # unformatted.
        poorsql = Popen(
            [POORSQL_BINARY_PATH,
            ], stdout=PIPE, stdin=PIPE, stderr=STDOUT)

        formatted = poorsql.communicate(input=raw_sql)[0]

        return formatted.replace('\r\n', '\n').strip()
    except:
        return raw_sql
from shared.data.yaml.core import dump

from java.util import Date

import java.awt.Point, java.awt.Dimension, java.util.UUID

# Values of these types pass through coerceValue untouched.
# NOTE(review): `None` here is a *value*, not a type — `type(x)` can never
# equal None, so None values always fall through to the coercion default
# (str, yielding 'None').  If pass-through is intended, this should be
# `type(None)`; left as-is to preserve existing dump output.
BASE_TYPES = set([bool, float, int, long, None, str, unicode])

# Java/AWT types with known plain-dict representations.
COERSION_MAP = {
    java.awt.Point: lambda v: {'x': v.getX(), 'y': v.getY()},
    java.awt.Dimension: lambda v: {'width': v.getWidth(), 'height': v.getHeight()},
    java.util.UUID: lambda v: str(v),
}


def coerceValue(value, default=str):
    """Coerce a (possibly Java) value into a plain serializable Python value,
    falling back to `default` (str) for unknown types."""
    if type(value) in BASE_TYPES:
        return value
    else:
        return COERSION_MAP.get(type(value), default)(value)


#ptd = propsetToDict = lambda ps: dict([(p.getName(), ps.get(p)) for p in ps.getProperties()])

def propsetToDict(property_set, recurse=False, coersion=coerceValue, visited=None):
    """Convert an Ignition property set into a plain dict, optionally
    recursing into nested property sets / iterables.  `visited` guards
    against cycles in the object graph."""
    if visited is None:
        visited = set()
    elif property_set in visited:
        return None
    # BUG FIX: the visited set was never populated, so the cycle guard above
    # could never trip and cyclic property sets would recurse forever.
    visited.add(property_set)

    result_dict = {}
    for prop in property_set.getProperties():
        value = property_set.get(prop)

        if recurse and not type(value) in BASE_TYPES:
            # Best-effort descent: try as a nested property set, then as an
            # iterable of property sets, else give up and coerce flat.
            try:
                deep = propsetToDict(value, recurse, coersion, visited)
            except:
                try:
                    deep = []
                    for element in value:
                        try:
                            deep.append(propsetToDict(element, recurse, coersion, visited))
                        except:
                            deep.append(coersion(element))
                except:
                    deep = None

            if deep:
                value = deep
            else:
                value = coersion(value)
        else:
            value = coersion(value)

        result_dict[prop.getName()] = value

    return result_dict


def hashmapToDict(hashmap):
    """Shallow-convert a java.util.HashMap into a Python dict."""
    return dict(
        (key, hashmap.get(key))
        for key in hashmap.keySet()
    )


def serializeToXML(obj, context=None):
    """Serialize `obj` via the designer context's XML serializer."""
    if context is None:
        context = getDesignerContext()
    serializer = context.createSerializer()
    serializer.addObject(obj)
    return serializer.serializeXML()


def stringify(obj):
    """Recursively convert `obj` into plain strings/containers suitable for
    YAML dumping (Java dates become ISO8601; unknown objects use repr)."""
    if isinstance(obj, (str, unicode)):
        return str(obj).replace('\r\n', '\n')
    elif isinstance(obj, (list, tuple)):
        return [stringify(item) for item in obj]
    elif isinstance(obj, dict):
        return dict((str(key), stringify(value))
                    for key, value
                    in obj.items())
    elif isinstance(obj, Date):
        return str(obj.toInstant())  # get the ISO8601 format
    # coerce java and other objects
    elif not isinstance(obj, (int, float, bool)):
        return repr(obj)
    return obj


def yamlEncode(obj):
    """Dump `obj` as deterministic (sorted-key) YAML."""
    return dump(stringify(obj), sort_keys=True, indent=4)


def encode(obj):
    """
    Encodes object in a serializing format.
    Returns tuple of serialization format's file extention and the serialized data.
    """
    return '.yaml', yamlEncode(obj),
    # return '.json', system.util.jsonEncode(obj, 2),


from com.inductiveautomation.ignition.common.xmlserialization import SerializationException

def getSerializationCauses(exception):
    """Many objects may not be able to deserialize if imported from an
    Ignition instance with additional (but locally missing) modules.

    This will drag out some of the context in an easier to scan way.
    """
    causes = []
    while exception:
        causes.append(exception)
        exception = exception.getCause()
    return causes
8 | """ 9 | 10 | try: 11 | from java.lang.System import currentTimeMillis as now 12 | except ImportError: 13 | from time import time 14 | now = lambda: int(round(time() * 1000)) 15 | 16 | from time import sleep 17 | from datetime import datetime, timedelta 18 | 19 | 20 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 21 | __license__ = 'Apache 2.0' 22 | __maintainer__ = 'Andrew Geiger' 23 | __email__ = 'andrew.geiger@corsosystems.com' 24 | 25 | 26 | def waitForConditionOrTimeout(rising_edge_function, us_timeout=100000, _us_check_rate=1000, err_msg="Function check failed to be truthy in time."): 27 | """Spins the execution's wheels while we wait for a condition to become true.""" 28 | _us_check_rate /= 1000000.0 29 | timeout = datetime.now() + timedelta(microseconds=us_timeout) 30 | while not rising_edge_function() and datetime.now() < timeout: 31 | sleep(_us_check_rate) 32 | 33 | if rising_edge_function(): 34 | return 35 | else: 36 | raise TimeoutError(err_msg) 37 | 38 | 39 | class AtLeastThisDelay(object): 40 | """Force a with statement to take a minimum amount of time before 41 | returning execution to the thread. 42 | 43 | Time is in units of seconds. (Internally it is in milliseconds.) 44 | 45 | Test code to see how it works: 46 | with AtLeastThisDelay(1.0) as remainingTime: 47 | pass 48 | print 'done!' # will not print for one second 49 | """ 50 | def __init__(self, minClamp=0): 51 | self.delayMS = minClamp * 1000.0 52 | 53 | def __enter__(self): 54 | self.startTimeMS = now() 55 | self.endTimeMS = self.startTimeMS + self.delayMS 56 | return lambda : self.endTimeMS - now() 57 | 58 | def __exit__(self, exc_type, exc_val, exc_tb): 59 | timeRemainingMS = self.endTimeMS - now() 60 | if timeRemainingMS > 0: 61 | sleep(timeRemainingMS / 1000.0) 62 | 63 | 64 | class EveryFixedBeat(object): 65 | """Times a for loop to iterate on delay times. 66 | Think of it like a metronome: long iterations can cause beats to be missed! 
67 | 68 | Use in a for loop, making each loop iterate ON the step times given 69 | (up to the max time provided). 70 | 71 | If the loop takes too long, it will skip the missed window and wait until the next. 72 | 73 | Time is in units of seconds. (Internally it is in milliseconds.) 74 | 75 | If steps are provided _instead_ of step times, then the needed step times 76 | will be calculated instead. 77 | 78 | for windowNumber,lastStepTime in EveryFixedDelay(1.0, 0.100): 79 | pass # iterates ten times 80 | for windowNumber,lastStepTime in EveryFixedDelay(1.0, numSteps=3): 81 | pass # iterates three times, about 333ms each 82 | """ 83 | def __init__(self, maxTime=0.0, stepTime=0.0, numSteps=0, initialIteration=True): 84 | 85 | maxTimeMS = maxTime * 1000.0 86 | stepTimeMS = stepTime * 1000.0 87 | 88 | if maxTimeMS and numSteps and not stepTimeMS: 89 | stepTimeMS = maxTimeMS / numSteps 90 | self.stepTimeMS = stepTimeMS 91 | self.maxTimeMS = maxTimeMS 92 | self.initialIteration = initialIteration 93 | 94 | self.startTimeMS = now() 95 | self.count = 0 96 | self.endTimeMS = self.startTimeMS + self.maxTimeMS 97 | self.lastStepTimeMS = self.startTimeMS 98 | 99 | def __iter__(self): 100 | if self.initialIteration: 101 | yield 0, 0 102 | 103 | while now() < self.endTimeMS: 104 | currentTimeMS = now() # keep it internally consistent, 105 | # but yield at end will be ever so slightly off 106 | 107 | nextStepNumber = (currentTimeMS + self.stepTimeMS - self.startTimeMS) // self.stepTimeMS 108 | nextStepTime = (nextStepNumber * self.stepTimeMS) + self.startTimeMS 109 | 110 | self.count = nextStepNumber 111 | 112 | if nextStepTime > self.endTimeMS: 113 | nextStepTime = self.endTimeMS 114 | 115 | # The extra +1ms here is to ensure we don't undershoot and loop too many times 116 | remainingWaitTime = nextStepTime - currentTimeMS + 1 117 | 118 | if remainingWaitTime > 0: 119 | sleep( remainingWaitTime / 1000.0 ) 120 | 121 | newCurrentTimeMS = now() 122 | lastStepDuration = 
(newCurrentTimeMS - self.lastStepTimeMS) / 1000.0 123 | self.lastStepTimeMS = newCurrentTimeMS 124 | 125 | 126 | yield self.count, lastStepDuration 127 | 128 | 129 | class EveryFixedDelay(object): 130 | """Times a for loop so that each step takes at least a certain delay. 131 | 132 | Use in a for loop, making each loop take at least the step time given 133 | (up to the max time provided). 134 | 135 | Time is in units of seconds. (Internally it is in milliseconds.) 136 | 137 | If a particular iteration takes a long time that merely delays the next iteration. 138 | If the current time exceeds the max time, the loop simply exits. 139 | 140 | If steps are provided _instead_ of step times, then the needed step times 141 | will be calculated instead. 142 | 143 | for iterNum,lastStepTime in EveryFixedDelay(1.0, 0.100): 144 | pass # iterates ten times 145 | for iterNum,lastStepTime in EveryFixedDelay(1.0, numSteps=3): 146 | pass # iterates three times, about 333ms each 147 | """ 148 | def __init__(self, maxTime=0.0, stepTime=0.0, numSteps=0, initialIteration=True): 149 | 150 | maxTimeMS = maxTime * 1000.0 151 | stepTimeMS = stepTime * 1000.0 152 | 153 | if maxTimeMS and numSteps and not stepTimeMS: 154 | stepTimeMS = maxTimeMS / (numSteps - initialIteration) 155 | self.stepTimeMS = stepTimeMS 156 | self.maxTimeMS = maxTimeMS 157 | self.initialIteration = initialIteration 158 | 159 | self.startTimeMS = now() 160 | self.count = -1 161 | self.endTimeMS = self.startTimeMS + self.maxTimeMS 162 | self.lastStepTimeMS = self.startTimeMS 163 | 164 | def __iter__(self): 165 | if self.initialIteration: 166 | self.count += 1 167 | yield 0, 0 168 | 169 | while now() < self.endTimeMS: 170 | currentTimeMS = now() # keep it internally consistent, 171 | # but yield at end will be ever so slightly off 172 | 173 | if self.lastStepTimeMS + self.stepTimeMS > self.endTimeMS: 174 | remainingWaitTime = self.startTimeMS + self.maxTimeMS - currentTimeMS 175 | else: 176 | # The extra +1ms here is to 
ensure we don't undershoot and loop too many times 177 | remainingWaitTime = self.lastStepTimeMS + self.stepTimeMS - currentTimeMS + 1 178 | 179 | if remainingWaitTime > 0: 180 | sleep(remainingWaitTime / 1000.0) 181 | 182 | self.count += 1 183 | newCurrentTimeMS = now() 184 | lastStepDuration = (newCurrentTimeMS - self.lastStepTimeMS) / 1000.0 185 | self.lastStepTimeMS = newCurrentTimeMS 186 | yield self.count, lastStepDuration 187 | 188 | 189 | #start = now() 190 | #print 'start %d' % start 191 | # 192 | ##for windowNumber,lastStepTime in EveryFixedRate(1.000, numSteps=3, initialIteration=True): 193 | ##for windowNumber,lastStepTime in EveryFixedRate(1.000, 0.300, initialIteration=False): 194 | ##for windowNumber,lastStepTime in EveryFixedRate(0, 0, initialIteration=False): 195 | ##for windowNumber,lastStepTime in EveryFixedRate(0, 0, initialIteration=True): 196 | ##for windowNumber,lastStepTime in EveryFixedDelay(0, 0, initialIteration=False): 197 | ##for windowNumber,lastStepTime in EveryFixedDelay(0, 0, initialIteration=True): 198 | ## print '%3d %5d %d' % (windowNumber, lastStepTime, now() - start) 199 | ## if windowNumber == 1: 200 | ## sleep(0.4) 201 | # 202 | ##for iterNum,lastStepTime in EveryFixedDelay(1.000, numSteps=3, initialIteration=True): 203 | ##for iterNum,lastStepTime in EveryFixedDelay(1.000, numSteps=3, initialIteration=False): 204 | ##for iterNum,lastStepTime in EveryFixedDelay(1.000, 0.250, initialIteration=True): 205 | ##for iterNum,lastStepTime in EveryFixedDelay(1.000, 0.300, initialIteration=True): 206 | ##for iterNum,lastStepTime in EveryFixedDelay(1.000, 0.300, initialIteration=False): 207 | ## print '%3d %5d %d' % (iterNum, lastStepTime, now() - start) 208 | ## if iterNum == 1: 209 | ## sleep(0.4) 210 | #end = now() 211 | #print 'done %d' % end 212 | #print 'total %d' % (end - start) 213 | -------------------------------------------------------------------------------- /shared/tools/venv.py: 
"""
Virtual environment bootstrapping
"""


import sys,imp


__copyright__ = """Copyright (C) 2020 Corso Systems"""
__license__ = 'Apache 2.0'
__maintainer__ = 'Andrew Geiger'
__email__ = 'andrew.geiger@corsosystems.com'



#from shared.tools.meta import currentStackDepth
def currentStackDepth():
	"""Count the frames from here down to the bottom of the call stack."""
	frame = sys._getframe(0)
	i = 0
	while frame:
		frame = frame.f_back
		i += 1
	return i


class Venv(object):
	"""Hoists a block of code as a module.
	Use to test and virtually bootstrap code in environments where globals can not be easily modified.

	>>> # Example for generating scope: (the commented lines generate specific errors)
	>>> from __future__ import with_statement
	>>> import sys
	>>> def createScopeInFunction():
	... 	modGen = Venv('examples.venv.functionScope').anchorModuleStart()
	... 	def foo():
	... 		print 'foo!'
	... 	modGen.anchorModuleEnd().bootstrapModule()
	>>> createScopeInFunction() # uncomment to fix ImportError on examples
	>>> from examples.venv.functionScope import foo # uncomment to fix NameError on foo
	>>> foo()
	foo!
	>>> # Example in using it as an in-line virtual environment:
	>>> def createEnvironment():
	... 	venv = Venv('examples.venv.presetScope').anchorModuleStart()
	... 	def bar():
	... 		print 'bar!'
	... 	venv.anchorModuleEnd()
	... 	return venv
	>>>
	>>> with createEnvironment():
	... 	from examples.venv.presetScope import bar
	... 	abc = 123
	... 	bar()
	bar!
	"""

	def __init__(self, modulePath=None, overwriteInterlock=False):
		# Record how deep the *caller's* frame is, so later lookups can
		# find that frame again no matter where they are called from.
		self._setCallingContext()

		# Refuse to shadow an already-imported module unless explicitly allowed.
		if modulePath in self._getCallingFrameSys().modules.keys():
			if not overwriteInterlock:
				raise ImportError('Venv interlocked because module already exists in scope: %s' % modulePath)

		self.modulePath = modulePath
		self.package = {}        # names captured between the two anchor calls
		self._createdScope = []  # module paths this Venv inserted into sys.modules

	def _setCallingContext(self, relativeOverride=0):
		# -1 for this method's own frame, -1 for the Venv method that
		# invoked it; what remains is the absolute depth of the user's frame.
		# NOTE(review): assumes exactly one intermediate Venv frame between
		# here and the caller -- confirm before adding nesting levels.
		self._callingStackDepth = currentStackDepth() - 1 - 1 + relativeOverride

	def _getCallingFrame(self):
		# Re-derive the caller's frame from the saved absolute depth,
		# relative to however deep the stack is *now*.
		return sys._getframe(currentStackDepth() - self._callingStackDepth)

	def _getCallingFrameSys(self):
		"""Fetch the `sys` module as seen from the calling frame's scope."""
		frame = self._getCallingFrame()
		scopedSys = frame.f_locals.get('sys', frame.f_globals.get('sys',None))
		if scopedSys is None:
			raise NameError('Venv needs sys to be imported into scope _before_ running!')
		return scopedSys

	def anchorModuleStart(self):
		"""Snapshot the caller's local names; capture starts here."""
		f_locals = self._getCallingFrame().f_locals
		self.startingSnapshot = frozenset(f_locals.keys())
		return self

	def anchorModuleEnd(self):
		"""Snapshot again; anything defined since anchorModuleStart becomes
		the payload of the module-to-be."""
		f_locals = self._getCallingFrame().f_locals
		self.endingSnapshot = frozenset(f_locals.keys())
		addedItems = self.endingSnapshot.difference(self.startingSnapshot)
		self.package.update(dict([(item,f_locals[item]) for item in addedItems]))
		return self

	@staticmethod
	def _initializeNewModule(modulePath):
		"""see https://github.com/reingart/pymotw3/blob/master/source/sys/sys_meta_path.py"""
		mod = imp.new_module(modulePath)
		mod.__file__ = modulePath
		mod.__name__ = modulePath
		mod.__package__ = '.'.join(modulePath.split('.')[:1])
		return mod

	def bootstrapModule(self):
		"""Publish the captured names as a real module in sys.modules,
		creating any missing parent packages along the way."""
		modulePathParts = self.modulePath.split('.')
		supportingPackages = ['.'.join(modulePathParts[0:i]) for i in range(1,len(modulePathParts))]

		# Ensure every parent package exists so imports can traverse down.
		for parentPath in supportingPackages:
			if not parentPath in sys.modules:
				newModule = self._initializeNewModule(parentPath)
				sys.modules[parentPath] = newModule
				self._createdScope.append(parentPath)

		newModule = self._initializeNewModule(self.modulePath)

		for key,value in self.package.items():
			setattr(newModule, key, value)
		sys.modules[self.modulePath] = newModule
		self._createdScope.append(self.modulePath)

		# chain connections
		if '.' in self.modulePath:
			# NOTE(review): this assignment is dead code -- parentModule is
			# unconditionally rebound from packageChain[0] just below.
			parentModule = sys.modules[modulePathParts[0]]
			packageChain = supportingPackages[:]
			packageChain.append(self.modulePath)

			parentModule = sys.modules[packageChain[0]]
			for modulePath in packageChain[1:]:
				childModule = sys.modules[modulePath]
				childModuleName = modulePath.rpartition('.')[2]
				# Attach each child module as an attribute of its parent
				# package. Only a *missing* attribute is repaired
				# (AttributeError); if a different object is already
				# attached, the assert raises AssertionError instead.
				try:
					assert childModule == getattr(parentModule, childModuleName)
				except AttributeError:
					setattr(parentModule, childModuleName, childModule)

				parentModule = childModule

	def _purgeScope(self):
		# Remove every module this Venv created, deepest paths first.
		# NOTE(review): pops from the *calling frame's* sys.modules while
		# bootstrapModule wrote to this module's global sys -- they usually
		# alias the same dict, but confirm the asymmetry is intentional.
		scopedSys = self._getCallingFrameSys()
		for modulePath in reversed(sorted(self._createdScope)):
			_ = scopedSys.modules.pop(modulePath)

	def __enter__(self):
		# Publish the pre-anchored package, then re-anchor so names defined
		# inside the with-block can be cleaned off the caller on exit.
		self.bootstrapModule()
		self._setCallingContext()
		_ = self.anchorModuleStart()

	def __exit__(self, exc_type, exc_val, exc_tb):
		# Tear down created modules, reset the payload, then collect any
		# names the with-block added and strip them from the caller's
		# locals so the surrounding scope is left untouched.
		# NOTE(review): mutating f_locals like this is reliable under
		# Jython; CPython may not persist the pops -- confirm target runtime.
		self._purgeScope()
		self.package = {}
		_ = self.anchorModuleEnd()
		f_locals = self._getCallingFrame().f_locals
		for key,value in self.package.items():
			f_locals.pop(key)
4 | """ 5 | 6 | __copyright__ = """Copyright (C) 2020 Corso Systems""" 7 | __license__ = 'Apache 2.0' 8 | __maintainer__ = 'Andrew Geiger' 9 | __email__ = 'andrew.geiger@corsosystems.com' 10 | 11 | 12 | class Wrapped(object): 13 | """Faking inheritance using guide rails. 14 | 15 | Some classes are complex and have sophisticated metaclasses. 16 | This class allows for wrapping such a thing transparently. 17 | For all intents and purposes, it will look and act like the wrapped type, 18 | but will not interfere with the metaclass's operations. 19 | 20 | IMPORTANT: Any subclass of this _must_ set the `_type` attribute! 21 | This is the class that would otherwise have been inherited from. 22 | """ 23 | 24 | __slots__ = ('_self',) 25 | 26 | _type = None 27 | 28 | def __init__(self, *args, **kwargs): 29 | """There's a lot of metamagic in the autogenerated. 30 | We don't want to interfere with that, so we're wrapping the object instead. 31 | """ 32 | self._self = self._type(*args, **kwargs) 33 | 34 | def __getattr__(self, attribute): 35 | """Get from this class first, otherwise use the wrapped item.""" 36 | try: 37 | return super(Wrapped, self).__getattr__(attribute) 38 | except AttributeError: 39 | return getattr(self._self, attribute) 40 | 41 | def __setattr__(self, attribute, value): 42 | """Set to this class first, otherwise use the wrapped item.""" 43 | try: 44 | return super(Wrapped, self).__setattr__(attribute, value) 45 | except AttributeError: 46 | return setattr(self._self, attribute, value) 47 | 48 | -------------------------------------------------------------------------------- /test/shared/tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CorsoSource/metatools/a4252820c42c5907bb08cfef4b5817362aa6a09c/test/shared/tools/__init__.py -------------------------------------------------------------------------------- /test/shared/tools/test_data.py: 
# ------------------------------------------------------------------------------
# test/shared/tools/test_data.py
# ------------------------------------------------------------------------------

import unittest, doctest

# Jython/Ignition-only imports: these tests run inside the gateway's JVM.
from org.apache.commons.lang3.time import DateUtils
from java.util import Date

from shared.tools.data import *


# Exercise the doctest examples embedded in shared.tools.data helpers.
doctest.run_docstring_examples(datasetToListDict,globals())
doctest.run_docstring_examples(datasetToDictList,globals())
doctest.run_docstring_examples(gatherKeys,globals())
doctest.run_docstring_examples(listDictToDataset,globals())


class RecordSetTestCase(unittest.TestCase):
	# Verifies that genRecordSet-produced record types ingest rows from
	# both lists and tuples without losing shape or data.

	def setUp(self):
		self.columnNames = 'c1 c2 c3'.split()
		self.numColumns = len(self.columnNames)
		self.numRows = 4
		self.RecordSet = genRecordSet(self.columnNames)

	def tearDown(self):
		pass

	# Test different inputs
	def test_readListOfLists(self):
		# generate source data
		listOfLists= [list(range(i,i+self.numColumns))
					  for i in range(1,self.numRows*self.numColumns,self.numColumns)]

		# generate test data
		listOfRecordSets = [self.RecordSet(row) for row in listOfLists]

		# check dimensions
		self.assertTrue(all(len(listOfRecordSets[i].keys()) for i in range(self.numColumns)))
		self.assertEqual(len(listOfRecordSets), self.numRows)

		# verify data imported correctly
		for lotRow,lorsRow in zip(listOfLists, listOfRecordSets):
			self.assertEqual(lotRow,list(lorsRow))

	# Test different inputs
	def test_readListOfTuples(self):
		# generate source data
		listOfTuples = [tuple(range(i,i+self.numColumns))
						for i in range(1,self.numRows*self.numColumns,self.numColumns)]

		# generate test data
		listOfRecordSets = [self.RecordSet(row) for row in listOfTuples]

		# check dimensions
		self.assertTrue(all(len(listOfRecordSets[i].keys()) for i in range(self.numColumns)))
		self.assertEqual(len(listOfRecordSets), self.numRows)

		# verify data imported correctly
		for lotRow,lorsRow in zip(listOfTuples, listOfRecordSets):
			self.assertEqual(lotRow,tuple(lorsRow))


suite = unittest.TestLoader().loadTestsFromTestCase(RecordSetTestCase)

unittest.TextTestRunner(verbosity=2).run(suite)

# ------------------------------------------------------------------------------
# test/shared/tools/test_logging.py
# ------------------------------------------------------------------------------

import unittest, doctest

from shared.tools.logging import BaseLogger

# BaseLogger's formatting helpers carry their specs as doctests; run them.
doctest.run_docstring_examples(BaseLogger()._generateMessage,globals())
doctest.run_docstring_examples(BaseLogger()._formatString, globals(), optionflags=doctest.ELLIPSIS)
doctest.run_docstring_examples(BaseLogger()._bracketString, globals())

# ------------------------------------------------------------------------------
# test/shared/tools/test_meta.py
# ------------------------------------------------------------------------------

import unittest, doctest

from shared.tools.meta import sentinel, getFunctionCallSigs

doctest.run_docstring_examples(sentinel,globals())
doctest.run_docstring_examples(getFunctionCallSigs,globals())


from shared.tools.meta import currentStackDepth, getObjectByName, getObjectName


class ObjectSearchTestCase(unittest.TestCase):
	# Exercises frame-walking helpers: getObjectByName searches live stack
	# frames for a name, in either direction.

	def test_stackSearch(self):
		# Generate a stack search
		def foo():
			x = 33
			def bar():
				y = 2
				def baz():
					z = 3
					x = 725

					currentDepth = currentStackDepth()

					# Start in this stack frame, go into the past
					self.assertEqual(725, getObjectByName('x'))
					# Start at the deepest past, and go towards the current stack frame
					self.assertEqual(33, getObjectByName('x', startRecent=False))
					# Start at the deepest past and go deeper (before foo was defined!)
					self.assertEqual(None, getObjectByName('x', currentDepth))
					# start at the deepest past and come towards the current stack frame
					self.assertEqual(33, getObjectByName('x', currentDepth, startRecent=False))

					self.assertEqual('foo', getObjectName(foo))
				baz()
			bar()
		foo()

	def test_PythonFunctionSigs(self):
		# Generate a few different functions to verify signatures.
		def fun1():
			pass
		def fun2(x,y,z=5):
			pass
		self.assertEqual('()', getFunctionCallSigs(fun1))
		self.assertEqual('(x, y, z=5)', getFunctionCallSigs(fun2))

	def test_JavaFunctionSigs(self):
		from java.util import Random

		# NOTE(review): the expected literals below (e.g. '(, )') look like
		# angle-bracketed type names were stripped by the HTML dump this
		# file was recovered from -- verify against the original repo.
		# Check the no args case
		self.assertEqual('()', getFunctionCallSigs(Random().nextBoolean))
		# Check the single call method case
		self.assertEqual('()', getFunctionCallSigs(Random().setSeed))
		# Check the many ways to call case
		self.assertEqual('() -OR- () -OR- (, ) -OR- (, , )', getFunctionCallSigs(Random().ints))
		# Try a different join method
		self.assertEqual('()|()|(, )|(, , )', getFunctionCallSigs(Random().ints, joinClause='|'))


suite = unittest.TestLoader().loadTestsFromTestCase(ObjectSearchTestCase)
unittest.TextTestRunner(verbosity=2).run(suite)

# ------------------------------------------------------------------------------
# test/shared/tools/test_thread.py
# ------------------------------------------------------------------------------

import unittest, doctest


# NOTE(review): Jython 2.x only -- `async` is a reserved word in Python 3,
# and the import path differs from the tree's shared/tools/thread.py;
# confirm the module location.
from shared.coros.thread import async


doctest.run_docstring_examples(async, globals(), optionflags=doctest.ELLIPSIS)

# ------------------------------------------------------------------------------
# test/shared/tools/test_venv.py
# ------------------------------------------------------------------------------

import unittest, doctest

import sys

from shared.tools.venv import Venv


# doctest.run_docstring_examples(Venv, globals(), optionflags=doctest.ELLIPSIS)


class VenvTestCases(unittest.TestCase):
	# Covers both Venv usage modes: one-shot bootstrap from a function,
	# and scoped bootstrap/teardown via the with-statement protocol.

	@staticmethod
	def _createScopeInFunction():
		modGen = Venv('examples.venv.functionScope').anchorModuleStart()
		def foo():
			return 'foo!'
		modGen.anchorModuleEnd().bootstrapModule()

	def test_bootstrapInFunction(self):
		def importForScopeInFunction():
			from examples.venv.functionScope import foo

		# Module must not exist before bootstrapping...
		self.assertRaises(ImportError, importForScopeInFunction)
		self._createScopeInFunction()
		# ...and bootstrapping must not leak names into *this* scope.
		self.assertNotIn('foo', locals())
		from examples.venv.functionScope import foo
		self.assertEqual(foo(), 'foo!')

	def test_withStatementEnvironment(self):
		def createEnvironment():
			venv = Venv('examples.venv.presetScope').anchorModuleStart()
			def bar():
				return 'bar!'
			venv.anchorModuleEnd()
			return venv

		def importForWithStatementIsolation():
			from examples.venv.presetScope import bar
			def callBar():
				bar()
		self.assertRaises(ImportError, importForWithStatementIsolation)
		self.assertNotIn('bar', locals())

		# Inside the with-block the module is importable...
		with createEnvironment():
			from examples.venv.presetScope import bar
			self.assertEqual(bar(), 'bar!')
			abc = 123

		# ...and on exit both the module and with-block names are purged.
		self.assertNotIn('bar', locals())
		self.assertNotIn('abc', locals())
		self.assertRaises(ImportError, importForWithStatementIsolation)


suite = unittest.TestLoader().loadTestsFromTestCase(VenvTestCases)
unittest.TextTestRunner(verbosity=1).run(suite)