├── tests ├── __init__.py ├── test_map.py ├── test_read.py ├── test_cksum.py └── test_parts.py ├── requirements.txt ├── MANIFEST.in ├── docker ├── build.sh └── run.sh ├── dev-requirements.txt ├── data ├── demo_ab.sor ├── sample1310_lowDR.sor ├── M200_Sample_005_S13.sor ├── demo_ab-dump.xml ├── M200_Sample_005_S13-dump.json ├── demo_ab-dump.json └── sample1310_lowDR-dump.json ├── pyotdr ├── __init__.py ├── dump.py ├── main.py ├── cksum.py ├── supparams.py ├── read.py ├── mapblock.py ├── datapts.py ├── parts.py ├── genparams.py ├── keyevents.py └── fxdparams.py ├── Dockerfile ├── setup.cfg ├── .github └── workflows │ └── test.yml ├── Makefile ├── .gitignore ├── setup.py ├── README.md └── LICENSE /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | crcmod 2 | dicttoxml -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include requirements.txt 2 | -------------------------------------------------------------------------------- /docker/build.sh: -------------------------------------------------------------------------------- 1 | docker build . 
-t sidneyli/pyotdr:latest 2 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | black 3 | mypy 4 | twine 5 | build 6 | -------------------------------------------------------------------------------- /data/demo_ab.sor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sid5432/pyOTDR/HEAD/data/demo_ab.sor -------------------------------------------------------------------------------- /pyotdr/__init__.py: -------------------------------------------------------------------------------- 1 | from .read import sorparse 2 | 3 | __version__ = "2.1.1" 4 | -------------------------------------------------------------------------------- /docker/run.sh: -------------------------------------------------------------------------------- 1 | docker run -ti -v /home/sid/:/data sidneyli/pyotdr:latest /bin/bash 2 | -------------------------------------------------------------------------------- /data/sample1310_lowDR.sor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sid5432/pyOTDR/HEAD/data/sample1310_lowDR.sor -------------------------------------------------------------------------------- /data/M200_Sample_005_S13.sor: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sid5432/pyOTDR/HEAD/data/M200_Sample_005_S13.sor -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-slim 2 | 3 | WORKDIR /pyotdr 4 | COPY requirements.txt . 5 | RUN pip install -r requirements.txt 6 | COPY . . 7 | RUN pip install . 
8 | 9 | 10 | CMD ["pyotdr"] 11 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | 4 | [mypy] 5 | mypy_path = stubs 6 | ignore_missing_imports = True 7 | 8 | [options.entry_points] 9 | console_scripts = 10 | pyOTDR=pyotdr.main:main 11 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Python test and build 2 | on: [push, pull_request] 3 | 4 | jobs: 5 | build: 6 | runs-on: ubuntu-latest 7 | strategy: 8 | matrix: 9 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 10 | steps: 11 | - uses: actions/checkout@v1 12 | - name: setup python ${{ matrix.python-version }} 13 | uses: actions/setup-python@v5 14 | with: 15 | python-version: ${{ matrix.python-version }} 16 | - name: Install dependencies 17 | run: | 18 | pip install -r requirements.txt -r dev-requirements.txt 19 | - name: is code properly linted 20 | run: black --check . 21 | - name: type checking 22 | run: mypy . 
23 | - name: run tests 24 | run: pytest tests 25 | 26 | 27 | -------------------------------------------------------------------------------- /tests/test_map.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | cdir = os.path.dirname(os.path.realpath(__file__)) 4 | 5 | from pyotdr.read import sorparse 6 | from pyotdr import parts 7 | 8 | 9 | def test_map(): 10 | filename = cdir + "/../data/demo_ab.sor" 11 | fh = parts.sorfile(filename) 12 | assert fh != None 13 | fh.close() 14 | 15 | status, results, trace = sorparse(filename) 16 | 17 | assert status == "ok" 18 | 19 | # map block 20 | ref = results["blocks"] 21 | assert ref["Cksum"]["pos"] == 25706 22 | assert ref["Cksum"]["version"] == "1.00" 23 | 24 | assert ref["DataPts"]["pos"] == 328 25 | assert ref["DataPts"]["size"] == 23564 26 | 27 | return 28 | 29 | 30 | # ================================== 31 | if __name__ == "__main__": 32 | test_map() 33 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | clean: 2 | rm -f Makefile.bak *-trace.dat *~ */*~ *-dump.json 3 | 4 | realclean: clean 5 | rm -rf *.json *.xml pyotdr/*.pyc *.pyc test/*.pyc */__pycache__ __pycache__ .cache tests/.cache 6 | rm -rf build dist pyotdr.egg-info 7 | 8 | build: realclean 9 | python -m build 10 | 11 | dist: build 12 | python setup.py sdist bdist_wheel 13 | 14 | upload: dist 15 | twine upload dist/* 16 | 17 | install: build 18 | python -m build 19 | 20 | docker-build: 21 | docker build . -t sidneyli/pyotdr:latest 22 | 23 | docker-run: 24 | echo "NOTE: not checking if docker image exists already!" 
25 | docker run -ti --rm -u `id -u`:`id -g` -v $(HOME):/data sidneyli/pyotdr:latest /bin/bash 26 | 27 | test: testall 28 | 29 | testall: 30 | echo "run tests in tests/" 31 | pytest 32 | 33 | doc: 34 | pandoc README.md -o README.rst 35 | 36 | html: doc 37 | rst2html5.py README.rst > README.html 38 | 39 | test1: 40 | ./pyotdr/main.py data/demo_ab.sor 41 | 42 | test2: 43 | ./pyotdr/main.py data/sample1310_lowDR.sor 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | *~ 7 | .idea/ 8 | 9 | .DS_Store 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | .hypothesis/ 50 | .mypy_cache 51 | 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | 59 | # pyenv 60 | .python-version 61 | 62 | # Environments 63 | .env 64 | .venv 65 | env/ 66 | venv/ 67 | ENV/ 68 | env.bak/ 69 | venv.bak/ 70 | 71 | 72 | # Rope project settings 73 | .ropeproject 74 | 75 | # mkdocs documentation 76 | /site 77 | 78 | 79 | -------------------------------------------------------------------------------- /pyotdr/dump.py: -------------------------------------------------------------------------------- 1 | import json 2 | from enum import Enum 3 | from functools import reduce 4 | from dicttoxml import dicttoxml 5 | 6 | 7 | class ExportDataType(Enum): 8 | JSON = "JSON" 9 | XML = "XML" 10 | 11 | def __str__(self) -> str: 12 | return self.value 13 | 14 | 15 | def replace_keys(results): 16 | newresults = {} 17 | for key in results.keys(): 18 | newkey = reduce(lambda x, y: x.replace(y, "_"), [" ", "/", "(", ")"], key) 19 | 20 | newresults[newkey] = results[key] 21 | if type(newresults[newkey]) is dict: 22 | newresults[newkey] = replace_keys(newresults[newkey]) 23 | 24 | return newresults 25 | 26 | 27 | def tofile(results, logfile, format: ExportDataType = ExportDataType.JSON): 28 | """ 29 | dump results to file (specifiled by file handle logfile) 30 | """ 31 | 32 | if format == ExportDataType.JSON: 33 | json.dump(results, logfile, sort_keys=True, indent=8, separators=(",", ": ")) 34 | elif format == ExportDataType.XML: 35 | newresults = replace_keys(results) 36 | logfile.write( 37 | dicttoxml(newresults, custom_root="sor", attr_type=False).decode("utf-8") 38 | ) 39 | else: 40 | raise ValueError("Format has to be JSON or XML") 41 | 
-------------------------------------------------------------------------------- /pyotdr/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | import argparse 4 | from pyotdr.dump import tofile, ExportDataType 5 | from pyotdr.read import sorparse 6 | 7 | logging.basicConfig(format="%(message)s") 8 | logger = logging.getLogger(__name__) 9 | LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG") 10 | logger.setLevel(LOG_LEVEL) 11 | 12 | 13 | def main(): 14 | parser = argparse.ArgumentParser() 15 | parser.add_argument("SOR_file", type=str, help="Name of the sor file to transform") 16 | parser.add_argument( 17 | "format", 18 | type=ExportDataType, 19 | choices=list(ExportDataType), 20 | default=ExportDataType.JSON, 21 | help="Output format : JSON or XML", 22 | nargs="?", 23 | ) 24 | args = parser.parse_args() 25 | 26 | logging.basicConfig(format="%(message)s") 27 | root_logger = logging.getLogger("pyotdr") 28 | root_logger.setLevel(LOG_LEVEL) 29 | 30 | filename = args.SOR_file 31 | opformat = ExportDataType(args.format) 32 | 33 | _, results, tracedata = sorparse(filename) 34 | 35 | # construct data file name to dump results 36 | fn_strip, _ = os.path.splitext(os.path.basename(filename)) 37 | datafile = fn_strip + "-dump." + str(opformat).lower() 38 | 39 | with open(datafile, "w") as output: 40 | tofile(results, output, format=opformat) 41 | 42 | # construct data file name 43 | fn_strip, _ = os.path.splitext(os.path.basename(filename)) 44 | opfile = fn_strip + "-trace.dat" 45 | 46 | with open(opfile, "w") as output: 47 | for xy in tracedata: 48 | output.write(xy) 49 | -------------------------------------------------------------------------------- /pyotdr/cksum.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from . 
import parts 3 | 4 | logger = logging.getLogger(__name__) 5 | 6 | 7 | def process(fh, results, debug=False): 8 | """ 9 | fh: file handle; 10 | results: dict for results; 11 | 12 | we assume mapblock.process() has already been run 13 | """ 14 | bname = "Cksum" 15 | hsize = len(bname) + 1 # include trailing '\0' 16 | pname = "Cksum.process():" 17 | sep = " :" 18 | status = "nok" 19 | 20 | try: 21 | ref = results["blocks"][bname] 22 | startpos = ref["pos"] 23 | fh.seek(startpos) 24 | except: 25 | logger.error("{} {} block starting position unknown ".format(pname, bname)) 26 | return status 27 | 28 | format = results["format"] 29 | 30 | if format == 2: 31 | mystr = fh.read(hsize).decode("ascii") 32 | if mystr != bname + "\0": 33 | logger.error("{} incorrect header {}".format(pname, mystr)) 34 | return status 35 | 36 | results[bname] = dict() 37 | xref = results[bname] 38 | 39 | # before reading the (file) checksum, get the cumulative checksum 40 | xref["checksum_ours"] = digest = fh.digest() 41 | csum = xref["checksum"] = parts.get_uint(fh, 2) 42 | 43 | if digest == csum: 44 | xref["match"] = True 45 | verdict = "MATCHES!" 46 | else: 47 | xref["match"] = False 48 | verdict = "DOES NOT MATCH!" 
49 | 50 | logger.debug("%s checksum from file %d (0x%X)" % (sep, csum, csum)) 51 | logger.debug("%s checksum calculated %d (0x%X) %s" % (sep, digest, digest, verdict)) 52 | 53 | status = "ok" 54 | return status 55 | -------------------------------------------------------------------------------- /tests/test_read.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | cdir = os.path.dirname(os.path.realpath(__file__)) 5 | 6 | 7 | from pyotdr.read import sorparse 8 | from pyotdr import parts 9 | 10 | 11 | def ordered(obj): 12 | if isinstance(obj, dict): 13 | return sorted((k, ordered(v)) for k, v in list(obj.items())) 14 | if isinstance(obj, list): 15 | return sorted(ordered(x) for x in obj) 16 | else: 17 | return obj 18 | 19 | 20 | def _compare_(sor_filename): 21 | 22 | filename = cdir + "/../data/" + sor_filename 23 | fh = parts.sorfile(filename) 24 | assert fh != None 25 | fh.close() 26 | 27 | status, results, tracedata = sorparse(filename) 28 | 29 | assert status == "ok" 30 | 31 | # load and compare JSON file 32 | fn_strip, ext = os.path.splitext(os.path.basename(filename)) 33 | datafile = fn_strip + "-dump.json" 34 | 35 | jsonfile = cdir + "/../data/" + datafile 36 | 37 | with open(jsonfile) as jsf: 38 | jold = dict(json.load(jsf)) 39 | 40 | jnew = json.dumps(results, sort_keys=True) 41 | jnew = json.loads(jnew) 42 | 43 | jold = ordered(jold) 44 | jnew = ordered(jnew) 45 | 46 | assert jold == jnew 47 | 48 | # load and compare trace data 49 | tfile = fn_strip + "-trace.dat" 50 | tfile = cdir + "/../data/" + tfile 51 | 52 | with open(tfile) as jsf: 53 | count = 0 54 | for line in jsf: 55 | assert line.strip("\n") == tracedata[count].strip("\n") 56 | count += 1 57 | 58 | return 59 | 60 | 61 | def test_read1(): 62 | _compare_("demo_ab.sor") 63 | return 64 | 65 | 66 | def test_read2(): 67 | _compare_("sample1310_lowDR.sor") 68 | return 69 | 70 | 71 | def test_read3(): 72 | 
_compare_("M200_Sample_005_S13.sor") 73 | return 74 | -------------------------------------------------------------------------------- /tests/test_cksum.py: -------------------------------------------------------------------------------- 1 | import os 2 | import crcmod 3 | 4 | from pyotdr.read import sorparse 5 | 6 | cdir = os.path.dirname(os.path.realpath(__file__)) 7 | 8 | 9 | def crc16_ccitt(data): 10 | """ 11 | Calculate the CRC16 CCITT checksum of *data*. 12 | 13 | (CRC16 CCITT: start 0xFFFF, poly 0x1021) 14 | same as: 15 | 16 | crcmod.mkCrcFun( 0x11021, initCrc=0xFFFF, xorOut=0x0000, rev=False) 17 | """ 18 | crc16 = crcmod.predefined.mkCrcFun("crc-ccitt-false") 19 | digest = crc16(data) 20 | return digest 21 | 22 | 23 | def test_cksum(): 24 | # sanity check algorithm 25 | digest = crc16_ccitt(b"123456789") 26 | 27 | assert digest == 0x29B1 28 | 29 | filename = cdir + "/../data/demo_ab.sor" 30 | with open(filename, mode="rb") as fh: 31 | data = fh.read() 32 | 33 | assert len(data) == 25708 34 | 35 | file_chk = data[-1] * 256 + data[-2] 36 | 37 | assert file_chk == 38827 38 | 39 | newdata = data[0:-2] 40 | 41 | # print "* trunc size is ",len(newdata) 42 | 43 | digest = crc16_ccitt(newdata) 44 | 45 | assert digest == file_chk 46 | 47 | devnull = open(os.devnull, "w") 48 | # test against module (SOR version 1) 49 | status, results, tracedata = sorparse(filename) 50 | # print(results) 51 | # print "* Our calcuated check sum: ",digest 52 | assert results["Cksum"]["checksum_ours"] == digest 53 | 54 | # print("--------------- ok version 1 -----------------------") 55 | 56 | # SOR version 2 57 | filename = cdir + "/../data/sample1310_lowDR.sor" 58 | status, results, tracedata = sorparse(filename) 59 | 60 | assert results["Cksum"]["checksum_ours"] == 62998 61 | assert results["Cksum"]["checksum"] == 59892 62 | 63 | # print("--------------- ok version 2 -----------------------") 64 | 65 | return 66 | 67 | 68 | # ========================================== 69 | if 
__name__ == "__main__": 70 | test_cksum() 71 | -------------------------------------------------------------------------------- /pyotdr/supparams.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from . import parts 3 | 4 | logger = logging.getLogger(__name__) 5 | 6 | sep = " :" 7 | 8 | 9 | def process(fh, results): 10 | """ 11 | fh: file handle; 12 | results: dict for results; 13 | 14 | we assume mapblock.process() has already been run 15 | """ 16 | bname = "SupParams" 17 | hsize = len(bname) + 1 # include trailing '\0' 18 | pname = "SupParams.process():" 19 | ref = None 20 | status = "nok" 21 | 22 | try: 23 | ref = results["blocks"][bname] 24 | startpos = ref["pos"] 25 | fh.seek(startpos) 26 | except: 27 | logger.debug("{} {} block starting position unknown".format(pname, bname)) 28 | return status 29 | 30 | format = results["format"] 31 | 32 | if format == 2: 33 | mystr = fh.read(hsize).decode("ascii") 34 | if mystr != bname + "\0": 35 | logger.error("{} incorrect header {}".format(pname, mystr)) 36 | return status 37 | 38 | results[bname] = dict() 39 | 40 | # version 1 and 2 are the same 41 | status = process_supparam(fh, results) 42 | 43 | # read the rest of the block (just in case) 44 | #endpos = results["blocks"][bname]["pos"] + results["blocks"][bname]["size"] 45 | #fh.read(endpos - fh.tell()) 46 | status = "ok" 47 | return status 48 | 49 | 50 | # ================================================================ 51 | def process_supparam(fh, results): 52 | """process SupParams fields""" 53 | bname = "SupParams" 54 | xref = results[bname] 55 | 56 | fields = ( 57 | "supplier", # ............. 0 58 | "OTDR", # ................. 1 59 | "OTDR S/N", # ............. 2 60 | "module", # ............... 3 61 | "module S/N", # ........... 4 62 | "software", # ............. 5 63 | "other", # ................ 
6 64 | ) 65 | 66 | count = 0 67 | for field in fields: 68 | xstr = parts.get_string(fh) 69 | logger.debug("{} {}. {}: {}".format(sep, count, field, xstr)) 70 | 71 | xref[field] = xstr 72 | count += 1 73 | 74 | status = "ok" 75 | 76 | return status 77 | -------------------------------------------------------------------------------- /pyotdr/read.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | 4 | from . import parts 5 | from . import mapblock 6 | from . import genparams 7 | from . import supparams 8 | from . import fxdparams 9 | from . import keyevents 10 | from . import datapts 11 | from . import cksum 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | # ----------------------------------------------------- 16 | def sorparse(filename): 17 | """ 18 | parse SOR file; 19 | return status and result (dictionary) 20 | """ 21 | fh = parts.sorfile(filename) 22 | if fh == None: 23 | return "Error opening file", None, None 24 | 25 | results = dict() 26 | status = "ok" 27 | 28 | results["filename"] = os.path.basename(filename) 29 | 30 | tracedata = [] 31 | 32 | # map block ------------------------------- 33 | status = mapblock.process(fh, results) 34 | if status != "ok": 35 | return status, results, tracedata 36 | 37 | # all the other blocks -------------------- 38 | klist = sorted(results["blocks"], key=lambda x: results["blocks"][x]["order"]) 39 | 40 | for bname in klist: 41 | ref = results["blocks"][bname] 42 | bname = ref["name"] 43 | bsize = ref["size"] 44 | start = ref["pos"] 45 | 46 | logger.debug( 47 | "\nMAIN: {} block: {:d} bytes, start pos {:#X} ({:d})".format( 48 | bname, bsize, start, start 49 | ) 50 | ) 51 | 52 | if bname == "GenParams": 53 | status = genparams.process(fh, results) 54 | elif bname == "SupParams": 55 | status = supparams.process(fh, results) 56 | elif bname == "FxdParams": 57 | status = fxdparams.process(fh, results) 58 | elif bname == "DataPts": 59 | status = 
datapts.process(fh, results, tracedata) 60 | elif bname == "KeyEvents": 61 | status = keyevents.process(fh, results) 62 | elif bname == "Cksum": 63 | status = cksum.process(fh, results) 64 | else: 65 | parts.slurp(fh, bname, results) 66 | status = "ok" 67 | pass 68 | 69 | # stop immediately if any errors 70 | if status != "ok": 71 | break 72 | 73 | # ................................... 74 | fh.close() 75 | 76 | return status, results, tracedata 77 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """A setuptools based setup module. 2 | 3 | See: 4 | https://packaging.python.org/en/latest/distributing.html 5 | https://github.com/pypa/sampleproject 6 | """ 7 | 8 | # Always prefer setuptools over distutils 9 | from setuptools import setup, find_packages 10 | 11 | # To use a consistent encoding 12 | from codecs import open 13 | from os import path 14 | 15 | here = path.abspath(path.dirname(__file__)) 16 | 17 | # Get the long description from the README file 18 | with open(path.join(here, "README.md"), encoding="utf-8") as f: 19 | long_description = f.read() 20 | 21 | with open(path.join(here, "requirements.txt")) as f: 22 | # deps are specified in requirements.txt. Avoid empty line. 23 | requirements = [x for x in f.read().split("\n") if x] 24 | 25 | # Arguments marked as "Required" below must be included for upload to PyPI. 26 | # Fields marked as "Optional" may be commented out. 27 | 28 | setup( 29 | name="pyotdr", 30 | version="2.1.1", 31 | description="A simple OTDR SOR file parser", 32 | long_description=long_description, 33 | long_description_content_type="text/markdown", 34 | url="https://github.com/sid5432/pyOTDR", 35 | author="Sidney Li, Rémi Desgrange", 36 | author_email="sidneyli5432@gmail.com, remi+pyotdr@desgran.ge", 37 | classifiers=[ # Optional 38 | # How mature is this project? 
Common values are 39 | # 3 - Alpha 40 | # 4 - Beta 41 | # 5 - Production/Stable 42 | "Development Status :: 4 - Beta", 43 | # Indicate who your project is intended for 44 | "Intended Audience :: Telecommunications Industry", 45 | "Topic :: Utilities", 46 | # Pick your license as you wish 47 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", 48 | "Programming Language :: Python :: 3", 49 | "Programming Language :: Python :: 3.6", 50 | "Programming Language :: Python :: 3.7", 51 | "Programming Language :: Python :: 3.8", 52 | "Programming Language :: Python :: 3.9", 53 | "Programming Language :: Python :: 3.10", 54 | "Programming Language :: Python :: 3.11", 55 | "Programming Language :: Python :: 3.12", 56 | ], 57 | keywords="SR-4731 reflectometer Telcordia OTDR SOR ", 58 | packages=find_packages(), 59 | install_requires=requirements, 60 | ) 61 | -------------------------------------------------------------------------------- /pyotdr/mapblock.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from . 
import parts 3 | 4 | logger = logging.getLogger("pyOTDR") 5 | 6 | 7 | def process(fh, results): 8 | """ 9 | fh: file handle; 10 | results: dict for results; 11 | """ 12 | 13 | fh.seek(0) 14 | 15 | tt = parts.get_string(fh) 16 | if tt == "Map": 17 | results["format"] = 2 18 | logger.debug("MAIN: bellcore 2.x version") 19 | else: 20 | results["format"] = 1 21 | logger.debug("MAIN: bellcore 1.x version") 22 | # rewind to start 23 | fh.seek(0) 24 | 25 | # get version number 26 | results["version"] = "%.2f" % (parts.get_uint(fh, 2) * 0.01) 27 | 28 | # get number of bytes in map block 29 | results["mapblock"] = dict() 30 | results["mapblock"]["nbytes"] = parts.get_uint(fh, 4) 31 | 32 | logger.debug( 33 | "MAIN: Version {}, block size {:d} bytes; next position {:#X}".format( 34 | results["version"], results["mapblock"]["nbytes"], fh.tell() 35 | ) 36 | ) 37 | 38 | # get number of block; not including the Map block 39 | results["mapblock"]["nblocks"] = parts.get_uint(fh, 2) - 1 40 | 41 | logger.debug( 42 | "MAIN: {:d} blocks to follow; next position {:#X}X".format( 43 | results["mapblock"]["nblocks"], fh.tell() 44 | ) 45 | ) 46 | logger.debug(parts.divider) 47 | 48 | # get block information 49 | logger.debug("MAIN: BLOCKS:") 50 | 51 | results["blocks"] = dict() 52 | startpos = results["mapblock"]["nbytes"] 53 | 54 | for i in range(results["mapblock"]["nblocks"]): 55 | bname = parts.get_string(fh) 56 | bver = "%.2f" % (parts.get_uint(fh, 2) * 0.01) 57 | bsize = parts.get_uint(fh, 4) 58 | 59 | ref = { 60 | "name": bname, 61 | "version": bver, 62 | "size": bsize, 63 | "pos": startpos, 64 | "order": i, 65 | } 66 | results["blocks"][bname] = ref 67 | 68 | logger.debug("MAIN: {} block: version {},".format(bname, bver)) 69 | logger.debug("block size {:d} bytes,".format(bsize)) 70 | logger.debug("start at pos {:#X}".format(startpos)) 71 | 72 | # start position of next block 73 | startpos += bsize 74 | 75 | logger.debug(parts.divider + "\n") 76 | logger.debug("MAIN: next position 
{:#X}".format(fh.tell())) 77 | logger.debug(parts.divider + "\n") 78 | 79 | status = "ok" 80 | return status 81 | -------------------------------------------------------------------------------- /tests/test_parts.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | cdir = os.path.dirname(os.path.realpath(__file__)) 5 | 6 | from pyotdr import parts 7 | 8 | 9 | def file1(): 10 | """SOR version 1 file""" 11 | filename = cdir + "/../data/demo_ab.sor" 12 | return filename 13 | 14 | 15 | def file2(): 16 | """SOR version 2 file""" 17 | filename = cdir + "/../data/sample1310_lowDR.sor" 18 | return filename 19 | 20 | 21 | # ------------------------------------------------------- 22 | def test_get_string(): 23 | """test get_string""" 24 | filename = file2() 25 | fh = parts.sorfile(filename) 26 | assert fh != None 27 | 28 | mystr = parts.get_string(fh) 29 | assert mystr == "Map" 30 | 31 | assert fh.tell() == 4 32 | fh.close() 33 | 34 | return 35 | 36 | 37 | # ------------------------------------------------------- 38 | def test_get_uint(): 39 | """test get_unsigned int (2 or 4)""" 40 | filename = file1() 41 | fh = parts.sorfile(filename) 42 | assert fh != None 43 | 44 | val = parts.get_uint(fh, nbytes=2) 45 | assert val == 100 46 | assert fh.tell() == 2 47 | 48 | val = parts.get_uint(fh, nbytes=4) 49 | assert val == 148 50 | assert fh.tell() == 6 51 | 52 | fh.close() 53 | 54 | return 55 | 56 | 57 | # ------------------------------------------------------- 58 | def test_get_hex(): 59 | """test hex conversion""" 60 | filename = file1() 61 | fh = parts.sorfile(filename) 62 | assert fh != None 63 | 64 | hstr = parts.get_hex(fh, 8) 65 | assert hstr == "64 00 94 00 00 00 0A 00 " 66 | fh.close() 67 | 68 | return 69 | 70 | 71 | # ------------------------------------------------------- 72 | def test_get_signed(): 73 | """test signed integer conversion""" 74 | filename = file2() 75 | fh = parts.sorfile(filename) 76 | 
assert fh != None 77 | 78 | fh.seek(461) 79 | fstr = parts.get_signed(fh, 2) 80 | assert fstr == 343 81 | 82 | fstr = parts.get_signed(fh, 2) 83 | assert fstr == 22820 84 | 85 | fstr = parts.get_signed(fh, 4) 86 | assert fstr == -38395 87 | 88 | fstr = parts.get_signed(fh, 8) 89 | assert fstr == 6002235321314002225 90 | 91 | fh.close() 92 | 93 | return 94 | 95 | 96 | # ================================== 97 | if __name__ == "__main__": 98 | test_get_string() 99 | test_get_uint() 100 | test_get_hex() 101 | test_get_signed() 102 | -------------------------------------------------------------------------------- /pyotdr/datapts.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | from . import parts 5 | 6 | sep = " :" 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | def process(fh, results, tracedata): 12 | """ 13 | fh: file handle; 14 | results: dict for results; 15 | 16 | we assume mapblock.process() has already been run 17 | """ 18 | bname = "DataPts" 19 | hsize = len(bname) + 1 # include trailing '\0' 20 | pname = "DataPts.process():" 21 | ref = None 22 | status = "nok" 23 | 24 | try: 25 | ref = results["blocks"][bname] 26 | startpos = ref["pos"] 27 | fh.seek(startpos) 28 | except: 29 | logger.error("{} {} block starting position unknown".format(pname, bname)) 30 | return status 31 | 32 | format = results["format"] 33 | 34 | if format == 2: 35 | mystr = fh.read(hsize).decode("ascii") 36 | if mystr != bname + "\0": 37 | logger.error("{} incorrect header {}".format(pname, mystr)) 38 | return status 39 | 40 | results[bname] = dict() 41 | xref = results[bname] 42 | 43 | # extra parameters 44 | xref["_datapts_params"] = {"xscaling": 1, "offset": "STV"} 45 | # method used by STV: minimum reading shifted to zero 46 | # method used by AFL/Noyes Trace.Net: maximum reading shifted to zero (approx) 47 | 48 | status = _process_data(fh, results, tracedata) 49 | 50 | # read the rest of the block 
(just in case) 51 | endpos = results["blocks"][bname]["pos"] + results["blocks"][bname]["size"] 52 | fh.read(endpos - fh.tell()) 53 | status = "ok" 54 | return status 55 | 56 | 57 | # ================================================================ 58 | def _process_data(fh, results, tracedata, dumptrace=True): 59 | """process version 1 format""" 60 | bname = "DataPts" 61 | xref = results[bname] 62 | 63 | try: 64 | # we assume SupParams block already processed 65 | model = results["SupParams"]["OTDR"] 66 | except: 67 | model = "" 68 | 69 | # special case: 70 | # old Noyes/AFL OFL250 model is off by factor of 10 71 | if model == "OFL250": 72 | xref["_datapts_params"]["xscaling"] = 0.1 73 | 74 | logger.debug("{} [initial 12 byte header follows]".format(sep)) 75 | 76 | N = parts.get_uint(fh, 4) 77 | # confirm N equal to FxdParams num data points 78 | if N != results["FxdParams"]["num data points"]: 79 | logger.warning( 80 | "block says number of data points is {} instead of {}".format( 81 | N, results["FxdParams"]["num data points"] 82 | ) 83 | ) 84 | 85 | xref["num data points"] = N 86 | logger.debug("{} num data points = {}".format(sep, N)) 87 | 88 | val = parts.get_signed(fh, 2) 89 | xref["num traces"] = val 90 | logger.debug("{} number of traces = {}".format(sep, val)) 91 | 92 | if val > 1: 93 | logger.warning("Cannot handle multiple traces ({}); aborting".format(val)) 94 | sys.exit() 95 | 96 | val = parts.get_uint(fh, 4) 97 | xref["num data points 2"] = val 98 | logger.debug("{} num data points again = {}".format(sep, val)) 99 | 100 | val = parts.get_uint(fh, 2) 101 | scaling_factor = val / 1000.0 102 | xref["scaling factor"] = scaling_factor 103 | logger.debug("{} scaling factor = {}".format(sep, scaling_factor)) 104 | 105 | # ..................................... 
106 | # adjusted resolution 107 | dx = results["FxdParams"]["resolution"] 108 | dlist = [] 109 | for i in range(N): 110 | val = parts.get_uint(fh, 2) 111 | dlist.append(val) 112 | 113 | ymax = max(dlist) 114 | ymin = min(dlist) 115 | fs = 0.001 * scaling_factor 116 | disp_min = "%.3f" % (ymin * fs) 117 | disp_max = "%.3f" % (ymax * fs) 118 | xref["max before offset"] = float(disp_max) 119 | xref["min before offset"] = float(disp_min) 120 | 121 | logger.debug( 122 | "{} before applying offset: max {} dB, min {} dB".format( 123 | sep, disp_max, disp_min 124 | ) 125 | ) 126 | 127 | # ......................................... 128 | # save to file 129 | offset = xref["_datapts_params"]["offset"] 130 | xscaling = xref["_datapts_params"]["xscaling"] 131 | 132 | # convert/scale to dB 133 | if offset == "STV": 134 | nlist = [(ymax - x) * fs for x in dlist] 135 | elif offset == "AFL": 136 | nlist = [(ymin - x) * fs for x in dlist] 137 | else: # invert 138 | nlist = [-x * fs for x in dlist] 139 | 140 | for i in range(N): 141 | # more work but (maybe) less rounding issues 142 | x = dx * i * xscaling / 1000.0 # output in km 143 | tracedata.append("{:f}\t{:f}\n".format(x, nlist[i])) 144 | 145 | # ......................................... 146 | status = "ok" 147 | 148 | return status 149 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pyOTDR: Simple OTDR SOR file parse written in Python 2 | 3 | 4 | ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pyotdr) 5 | 6 | The SOR ("Standard OTDR Record") data format is used to store OTDR 7 | ([optical time-domain 8 | reflectometer](http://https://en.wikipedia.org/wiki/Optical_time-domain_reflectometer) 9 | ) fiber data. The format is defined by the Telcordia [SR-4731, issue 10 | 2](http://telecom-info.telcordia.com/site-cgi/ido/docs.cgi?ID=SEARCH&DOCUMENT=SR-4731&) 11 | standard. 
reflectometer](https://en.wikipedia.org/wiki/Optical_time-domain_reflectometer).
pip install -r requirements.txt
to spin up a container. This will start a command shell for you to run the *pyotdr* program. The docker command
above will mount your home directory to the */data* folder inside the docker instance. The
command pyotdr (installed by pip) will be in your execution path.
The container removes itself when you exit the instance.
logger = logging.getLogger(__name__)

divider = (
    "--------------------------------------------------------------------------------"
)

# speed of light
sol = 299792.458 / 1.0e6  # = 0.299792458 km/usec


class FH:
    """
    Wrapper around a binary file handle that also feeds every byte read
    through a CRC16/CCITT checksum (poly 0x1021, init 0xFFFF) so the file
    checksum can be verified after a single sequential pass.
    """

    def __init__(self, filehandle: BinaryIO):
        self.filehandle = filehandle
        self.bufsize = 2048  # adjust as needed
        self.buffer = b""
        self.spaceleft = self.bufsize
        # CRC16 CCITT: start 0xFFFF, poly 0x1021; same as
        # crcmod.mkCrcFun(0x11021, initCrc=0xFFFF, xorOut=0x0000, rev=False)
        self.crc16 = crcmod.predefined.Crc("crc-ccitt-false")

    def read(self, *args, **kargs):
        """Read from the underlying file, accumulating bytes for the CRC."""
        buf = self.filehandle.read(*args, **kargs)
        xlen = len(buf)
        if xlen > self.spaceleft:
            # buffer full: fold it into the CRC and start a fresh one
            self.crc16.update(self.buffer)
            self.buffer = b""
            self.spaceleft = self.bufsize

        self.buffer += buf
        self.spaceleft -= xlen
        return buf

    def digest(self):
        """Fold in the remaining buffered bytes and return the CRC value."""
        # last part of the file
        self.crc16.update(self.buffer)
        return self.crc16.crcValue

    def seek(self, *args, **kargs):
        # assume a rewind, and reset buffer and CRC state
        if args[0] == 0:
            self.buffer = b""
            self.spaceleft = self.bufsize
            self.crc16 = crcmod.predefined.Crc("crc-ccitt-false")

        return self.filehandle.seek(*args, **kargs)

    def tell(self) -> int:
        return self.filehandle.tell()

    def close(self) -> None:
        return self.filehandle.close()


def sorfile(filename: str) -> "FH":
    """
    Open *filename* and return a checksumming FH wrapper; caller must close.

    We assume that file content is
    - all read (not skipped)
    - only read once (except for the version 1 vs. 2 header; rewind at most once)
    - read sequentially
    these are needed for the CRC checksum calculation to work.

    Raises IOError (logged) if the file cannot be opened.
    """
    try:
        fh = open(filename, "rb")
        return FH(fh)
    except IOError as e:
        logger.error("Failed to read {}".format(filename))
        raise e


# -----------------------------------------------------
def get_string(fh: BinaryIO) -> str:
    """
    Read a NUL-terminated string from the file handle; decode as utf-8.

    Stops at the first b"\\x00" byte or at end-of-file.  (The original
    loop compared a bytes value against the str "" — a condition that can
    never be true — so hitting EOF before the terminator crashed inside
    struct.unpack instead of returning the accumulated string.)
    """
    mystr = b""
    byte = fh.read(1)
    while byte:  # b"" signals EOF
        if byte == b"\x00":
            break
        mystr += byte
        byte = fh.read(1)

    return mystr.decode("utf-8")


def get_float(fh: "FH", nbytes: int) -> float:
    """get little-endian floating point (4 or 8 bytes); fh is the file handle"""
    tmp = fh.read(nbytes)
    if nbytes == 4:
        # IEEE 754 single precision
        return struct.unpack("<f", tmp)[0]
    elif nbytes == 8:
        # IEEE 754 double precision
        return struct.unpack("<d", tmp)[0]
    else:
        raise ValueError("get_float: nbytes must be 4 or 8")


def get_uint(fh: "FH", nbytes: int = 2) -> int:
    """
    get unsigned int (little endian), 2 bytes by default
    (assume nbytes is positive)
    """

    word = fh.read(nbytes)
    if nbytes == 2:
        # unsigned short
        return struct.unpack("<H", word)[0]
    elif nbytes == 4:
        # unsigned int
        return struct.unpack("<I", word)[0]
    elif nbytes == 8:
        # unsigned long long
        return struct.unpack("<Q", word)[0]
    else:
        raise ValueError("get_uint: nbytes must be 2, 4, or 8")


def get_signed(fh: "FH", nbytes: int = 2) -> int:
    """
    get signed int (little endian), 2 bytes by default
    (assume nbytes is positive)
    """

    word = fh.read(nbytes)
    if nbytes == 2:
        # signed short
        val = struct.unpack("<h", word)[0]
    elif nbytes == 4:
        # signed int
        val = struct.unpack("<i", word)[0]
    elif nbytes == 8:
        # signed long long
        val = struct.unpack("<q", word)[0]
    else:
        raise ValueError("get_signed: nbytes must be 2, 4, or 8")

    return val


def get_hex(fh: "FH", nbytes: int = 1) -> str:
    """
    get nbytes bytes (1 by default) and display as hexadecimal;
    each byte is rendered as two uppercase hex digits plus a trailing space.
    """
    hstr = ""
    for _ in range(nbytes):
        hstr += "%02X " % ord(fh.read(1))
    return hstr


def slurp(fh: "FH", bname: str, results: dict) -> str:
    """
    fh: file handle;
    bname: block name (key into results["blocks"]);
    results: dict for results;

    just read this block without processing (keeps the CRC stream aligned);
    returns "ok" on success, "nok" if the block position is unknown.
    """
    status = "nok"

    try:
        ref = results["blocks"][bname]
        startpos = ref["pos"]
        fh.seek(startpos)
    except KeyError:
        # TODO this should raise
        logger.error("{} block starting position unknown".format(bname))
        return status

    nn = ref["size"]

    fh.read(nn)

    status = "ok"
    return status
# ================================================================
def build_condition(bcstr):
    """decode a 2-char build condition code into code + description"""
    suffixes = {
        "BC": " (as-built)",
        "CC": " (as-current)",
        "RC": " (as-repaired)",
        "OT": " (other)",
    }
    return bcstr + suffixes.get(bcstr, " (unknown)")


# ================================================================
def fiber_type(val):
    """
    decode fiber type
    REF: http://www.ciscopress.com/articles/article.asp?p=170740&seqNum=7
    """
    names = {
        651: "G.651 (50um core multimode)",  # ITU-T G.651
        652: "G.652 (standard SMF)",  # standard nondispersion-shifted
        # G.652.C low Water Peak Nondispersion-Shifted Fiber
        653: "G.653 (dispersion-shifted fiber)",
        # typo fixed: was "loss-minimzed"
        654: "G.654 (1550nm loss-minimized fiber)",
        655: "G.655 (nonzero dispersion-shifted fiber)",
    }
    # TODO add G657
    return names.get(val, "%d (unknown)" % val)


# ================================================================
def process1(fh, results):
    """process version 1 format of the GenParams block body

    fh: file handle positioned just past the block header;
    results: dict for results (results["GenParams"] already created).
    Returns "ok".
    """
    bname = "GenParams"
    xref = results[bname]

    lang = fh.read(2).decode("ascii")
    xref["language"] = lang
    logger.debug("{} language '{}', next pos {}".format(sep, lang, fh.tell()))

    fields = (
        "cable ID",  # ........... 0
        "fiber ID",  # ........... 1
        "wavelength",  # ............2: fixed 2 bytes value
        "location A",  # ............ 3
        "location B",  # ............ 4
        "cable code/fiber type",  # ............ 5
        "build condition",  # ....... 6: fixed 2 bytes char/string
        "user offset",  # ........... 7: fixed 4 bytes (Andrew Jones)
        "operator",  # ........... 8
        "comments",  # ........... 9
    )

    for count, field in enumerate(fields):
        if field == "build condition":
            xstr = build_condition(fh.read(2).decode("ascii"))
        elif field == "wavelength":
            xstr = "%d nm" % parts.get_uint(fh, 2)
        elif field == "user offset":
            xstr = "%d" % parts.get_signed(fh, 4)
        else:
            # variable-length NUL-terminated string
            xstr = parts.get_string(fh)

        logger.debug("{} {}. {}: {}".format(sep, count, field, xstr))

        xref[field] = xstr

    return "ok"


# ================================================================
def process2(fh, results):
    """process version 2 format of the GenParams block body

    Same contract as process1(); version 2 adds the "fiber type" and
    "user offset distance" fields.
    """
    bname = "GenParams"
    xref = results[bname]

    lang = fh.read(2).decode("ascii")
    xref["language"] = lang
    logger.debug("{} language: '{}', next pos {}".format(sep, lang, fh.tell()))

    fields = (
        "cable ID",  # ........... 0
        "fiber ID",  # ........... 1
        "fiber type",  # ........... 2: fixed 2 bytes value
        "wavelength",  # ............3: fixed 2 bytes value
        "location A",  # ............ 4
        "location B",  # ............ 5
        "cable code/fiber type",  # ............ 6
        "build condition",  # ....... 7: fixed 2 bytes char/string
        "user offset",  # ........... 8: fixed 4 bytes int (Andrew Jones)
        "user offset distance",  # .. 9: fixed 4 bytes int (Andrew Jones)
        "operator",  # ........... 10
        "comments",  # ........... 11
    )

    for count, field in enumerate(fields):
        if field == "build condition":
            xstr = build_condition(fh.read(2).decode("ascii"))
        elif field == "fiber type":
            xstr = fiber_type(parts.get_uint(fh, 2))
        elif field == "wavelength":
            xstr = "%d nm" % parts.get_uint(fh, 2)
        elif field == "user offset" or field == "user offset distance":
            xstr = "%d" % parts.get_signed(fh, 4)
        else:
            # variable-length NUL-terminated string
            xstr = parts.get_string(fh)

        logger.debug("{} {}. {}: {}".format(sep, count, field, xstr))

        xref[field] = xstr

    return "ok"
116 | 117 | 9 118 | 148 119 | 120 | 121 | DE37300051 122 | 3617G00108 123 | E6008A 124 | A3717-00051 125 | 28.01.98 126 | Hewlett Packard 127 | E6000A 128 | 3.0 129 | 130 | demo_ab.sor 131 | 1.00 132 | 133 | 38827 134 | 38827 135 | True 136 | 137 | 138 | 5 139 | 140 | 0.0 141 | 0.0 142 | 0.0 143 | 0.0 144 | 50.727876 145 | 50.727876 146 | 147 | 148 | 0.344 149 | 12.711 150 | 151 | 0.000 152 | 0F9999LS {auto} loss/drop/gain 153 | 0.209 154 | 155 | 156 | 0.342 157 | 25.351 158 | 159 | -51.514 160 | 1F9999LS {auto} reflection 161 | 0.087 162 | 163 | 164 | 0.000 165 | 0.000 166 | 167 | -50.000 168 | 1F9999LS {auto} reflection 169 | 0.000 170 | 171 | 172 | 0.344 173 | 38.047 174 | 175 | 0.000 176 | 0F9999LS {auto} loss/drop/gain 177 | 0.149 178 | 179 | 180 | 0.344 181 | 50.728 182 | 183 | -16.726 184 | 1E9999LS {auto} reflection 185 | 13.232 186 | 187 | 188 | -------------------------------------------------------------------------------- /pyotdr/keyevents.py: -------------------------------------------------------------------------------- 1 | import re 2 | import logging 3 | from . 
def process(fh, results):
    """
    fh: file handle;
    results: dict for results;

    we assume mapblock.process() has already been run, so
    results["blocks"]["KeyEvents"] holds the block position and size.
    Returns "ok" on success, "nok" on a missing/invalid block.
    """
    bname = "KeyEvents"
    hsize = len(bname) + 1  # include trailing '\0'
    pname = bname + ".process():"
    ref = None
    status = "nok"

    try:
        ref = results["blocks"][bname]
        startpos = ref["pos"]
        fh.seek(startpos)
    except KeyError:
        # mapblock.process() did not record this block
        logger.error("{} {} block starting position unknown".format(pname, bname))
        return status

    format = results["format"]

    # version 2 repeats the block name as a header; verify it
    if format == 2:
        mystr = fh.read(hsize).decode("ascii")
        if mystr != bname + "\0":
            logger.error("{} {} incorrect header ".format(pname, mystr))
            return status

    results[bname] = dict()
    xref = results[bname]

    status = _process_keyevents(fh, format, results)

    # read the rest of the block (just in case) to keep the CRC stream aligned
    endpos = results["blocks"][bname]["pos"] + results["blocks"][bname]["size"]
    fh.read(endpos - fh.tell())
    status = "ok"
    return status


# ================================================================
def _process_keyevents(fh, format, results):
    """process version 1 or 2 format KeyEvents body

    Reads the event records sequentially (field order is fixed by the SOR
    layout — do not reorder the parts.get_* calls), then the summary
    record.  Distances are stored as time-of-travel and converted to km
    via the group index from FxdParams.
    """
    bname = "KeyEvents"
    xref = results[bname]

    # number of events
    nev = parts.get_uint(fh, 2)
    logger.debug("{} {} events".format(sep, nev))

    xref["num events"] = nev

    # time-of-travel -> km conversion factor
    factor = 1e-4 * parts.sol / float(results["FxdParams"]["index"])

    # event-type code: subtype char, manual/auto char, then "9999LS"
    pat = re.compile("(.)(.)9999LS")

    for j in range(nev):
        x2ref = xref["event %d" % (1 + j)] = {}

        xid = parts.get_uint(fh, 2)  # 00-01: event number
        dist = (
            parts.get_uint(fh, 4) * factor
        )  # 02-05: time-of-travel; need to convert to distance

        slope = parts.get_signed(fh, 2) * 0.001  # 06-07: slope
        splice = parts.get_signed(fh, 2) * 0.001  # 08-09: splice loss
        refl = parts.get_signed(fh, 4) * 0.001  # 10-13: reflection loss

        xtype = fh.read(8)  # 14-21: event type
        xtype = xtype.decode("ascii")

        mresults = pat.match(xtype)
        if mresults is not None:
            subtype = mresults.groups(0)[0]
            manual = mresults.groups(0)[1]

            if manual == "A":
                xtype += " {manual}"
            else:
                xtype += " {auto}"

            if subtype == "1":
                xtype += " reflection"
            elif subtype == "0":
                xtype += " loss/drop/gain"
            elif subtype == "2":
                xtype += " multiple"
            else:
                xtype += " unknown '" + subtype + "'"
        else:
            xtype += " [unknown type " + xtype + "]"

        # version 2 adds neighbor-event boundary positions
        if format == 2:
            end_prev = parts.get_uint(fh, 4) * factor  # 22-25: end of previous event
            start_curr = parts.get_uint(fh, 4) * factor  # 26-29: start of current event
            end_curr = parts.get_uint(fh, 4) * factor  # 30-33: end of current event
            start_next = parts.get_uint(fh, 4) * factor  # 34-37: start of next event
            pkpos = parts.get_uint(fh, 4) * factor  # 38-41: peak point of event

        comments = parts.get_string(fh)

        x2ref["type"] = xtype
        x2ref["distance"] = "%.3f" % dist
        x2ref["slope"] = "%.3f" % slope
        x2ref["splice loss"] = "%.3f" % splice
        x2ref["refl loss"] = "%.3f" % refl
        x2ref["comments"] = comments

        if format == 2:
            x2ref["end of prev"] = "%.3f" % end_prev
            x2ref["start of curr"] = "%.3f" % start_curr
            x2ref["end of curr"] = "%.3f" % end_curr
            x2ref["start of next"] = "%.3f" % start_next
            x2ref["peak"] = "%.3f" % pkpos

        logger.debug("{} Event {}: type {}".format(sep, xid, xtype))
        logger.debug("{}{} distance: {:.3f} km".format(sep, sep, dist))
        logger.debug("{}{} slope: {:.3f} dB/km".format(sep, sep, slope))
        logger.debug("{}{} splice loss: {:.3f} dB".format(sep, sep, splice))
        logger.debug("{}{} refl loss: {:.3f} dB".format(sep, sep, refl))
        # version 2
        if format == 2:
            logger.debug(
                "{}{} end of previous event: {:.3f} km".format(sep, sep, end_prev)
            )
            logger.debug(
                "{}{} start of current event: {:.3f} km".format(sep, sep, start_curr)
            )
            logger.debug(
                "{}{} end of current event: {:.3f} km".format(sep, sep, end_curr)
            )
            logger.debug(
                "{}{} start of next event: {:.3f} km".format(sep, sep, start_next)
            )
            logger.debug("{}{} peak point of event: {:.3f} km".format(sep, sep, pkpos))

        # common
        logger.debug("{}{} comments: {}".format(sep, sep, comments))

    # ...................................................
    # summary record follows the last event
    total = parts.get_signed(fh, 4) * 0.001  # 00-03: total loss
    loss_start = parts.get_signed(fh, 4) * factor  # 04-07: loss start position
    loss_finish = parts.get_uint(fh, 4) * factor  # 08-11: loss finish position
    orl = parts.get_uint(fh, 2) * 0.001  # 12-13: optical return loss (ORL)
    orl_start = parts.get_signed(fh, 4) * factor  # 14-17: ORL start position
    orl_finish = parts.get_uint(fh, 4) * factor  # 18-21: ORL finish position

    logger.debug("{} Summary:".format(sep))
    logger.debug("{}{} total loss: {:.3f} dB".format(sep, sep, total))
    logger.debug("{}{} ORL: {:.3f} dB".format(sep, sep, orl))
    logger.debug("{}{} loss start: {:f} km".format(sep, sep, loss_start))
    logger.debug("{}{} loss end: {:f} km".format(sep, sep, loss_finish))
    logger.debug("{}{} ORL start: {:f} km".format(sep, sep, orl_start))
    logger.debug("{}{} ORL finish: {:f} km".format(sep, sep, orl_finish))

    x3ref = xref["Summary"] = {}
    x3ref["total loss"] = float("%.3f" % total)
    x3ref["ORL"] = float("%.3f" % orl)
    x3ref["loss start"] = float("%.6f" % loss_start)
    x3ref["loss end"] = float("%.6f" % loss_finish)
    x3ref["ORL start"] = float("%.6f" % orl_start)
    x3ref["ORL finish"] = float("%.6f" % orl_finish)

    # ................
    status = "ok"
    return status
"0.000", 68 | "splice loss": "0.168", 69 | "type": "1F9999LS {auto} reflection" 70 | }, 71 | "event 2": { 72 | "comments": " ", 73 | "distance": "0.091", 74 | "refl loss": "-38.454", 75 | "slope": "0.120", 76 | "splice loss": "0.791", 77 | "type": "1F9999LS {auto} reflection" 78 | }, 79 | "event 3": { 80 | "comments": " ", 81 | "distance": "0.395", 82 | "refl loss": "-51.983", 83 | "slope": "0.362", 84 | "splice loss": "0.045", 85 | "type": "1F9999LS {auto} reflection" 86 | }, 87 | "event 4": { 88 | "comments": " ", 89 | "distance": "0.796", 90 | "refl loss": "-58.134", 91 | "slope": "0.334", 92 | "splice loss": "0.347", 93 | "type": "1F9999LS {auto} reflection" 94 | }, 95 | "event 5": { 96 | "comments": " ", 97 | "distance": "3.787", 98 | "refl loss": "-30.760", 99 | "slope": "0.321", 100 | "splice loss": "0.000", 101 | "type": "1E9999LS {auto} reflection" 102 | }, 103 | "num events": 5 104 | }, 105 | "SupParams": { 106 | "OTDR": "M200", 107 | "OTDR S/N": " ", 108 | "module": " ", 109 | "module S/N": " ", 110 | "other": " ", 111 | "software": "0.0.14", 112 | "supplier": "Noyes" 113 | }, 114 | "blocks": { 115 | "Cksum": { 116 | "name": "Cksum", 117 | "order": 7, 118 | "pos": 32768, 119 | "size": 2, 120 | "version": "1.00" 121 | }, 122 | "DataPts": { 123 | "name": "DataPts", 124 | "order": 3, 125 | "pos": 254, 126 | "size": 32012, 127 | "version": "1.00" 128 | }, 129 | "FxdParams": { 130 | "name": "FxdParams", 131 | "order": 2, 132 | "pos": 200, 133 | "size": 54, 134 | "version": "1.10" 135 | }, 136 | "GenParams": { 137 | "name": "GenParams", 138 | "order": 0, 139 | "pos": 124, 140 | "size": 50, 141 | "version": "1.10" 142 | }, 143 | "KeyEvents": { 144 | "name": "KeyEvents", 145 | "order": 4, 146 | "pos": 32266, 147 | "size": 153, 148 | "version": "1.10" 149 | }, 150 | "Noyes2": { 151 | "name": "Noyes2", 152 | "order": 5, 153 | "pos": 32419, 154 | "size": 292, 155 | "version": "2.02" 156 | }, 157 | "Noyes3": { 158 | "name": "Noyes3", 159 | "order": 6, 160 | "pos": 
32711, 161 | "size": 57, 162 | "version": "2.02" 163 | }, 164 | "SupParams": { 165 | "name": "SupParams", 166 | "order": 1, 167 | "pos": 174, 168 | "size": 26, 169 | "version": "1.00" 170 | } 171 | }, 172 | "filename": "M200_Sample_005_S13.sor", 173 | "format": 1, 174 | "mapblock": { 175 | "nblocks": 8, 176 | "nbytes": 124 177 | }, 178 | "version": "1.00" 179 | } 180 | -------------------------------------------------------------------------------- /data/demo_ab-dump.json: -------------------------------------------------------------------------------- 1 | { 2 | "Cksum": { 3 | "checksum": 38827, 4 | "checksum_ours": 38827, 5 | "match": true 6 | }, 7 | "DataPts": { 8 | "_datapts_params": { 9 | "offset": "STV", 10 | "xscaling": 1 11 | }, 12 | "max before offset": 65.535, 13 | "min before offset": 15.829, 14 | "num data points": 11776, 15 | "num data points 2": 11776, 16 | "num traces": 1, 17 | "scaling factor": 1.0 18 | }, 19 | "FxdParams": { 20 | "BC": "-81.50 dB", 21 | "EOT thr": "5.000 dB", 22 | "acquisition offset": 0, 23 | "date/time": "Thu Feb 05 08:46:14 1998 (886668374 sec)", 24 | "front panel offset": 0, 25 | "index": "1.471100", 26 | "loss thr": "0.000 dB", 27 | "noise floor level": 52058, 28 | "noise floor scaling factor": 1000, 29 | "num averages": 30, 30 | "num data points": 11776, 31 | "number of pulse width entries": 1, 32 | "power offset first point": 0, 33 | "pulse width": "1000 ns", 34 | "range": 59.99514943351243, 35 | "refl thr": "-0.000 dB", 36 | "resolution": 5.094696792927346, 37 | "sample spacing": "0.02499999 usec", 38 | "unit": "mt (meters)", 39 | "wavelength": "1310.0 nm" 40 | }, 41 | "GenParams": { 42 | "build condition": "CC (as-current)", 43 | "cable ID": "K1 AB", 44 | "cable code/fiber type": " ", 45 | "comments": "HP Emulation SW", 46 | "fiber ID": " ", 47 | "language": "EN", 48 | "location A": " ", 49 | "location B": " ", 50 | "operator": "HP ", 51 | "user offset": "0", 52 | "wavelength": "1310 nm" 53 | }, 54 | "KeyEvents": { 55 | 
"Summary": { 56 | "ORL": 0.0, 57 | "ORL finish": 50.727876, 58 | "ORL start": 0.0, 59 | "loss end": 50.727876, 60 | "loss start": 0.0, 61 | "total loss": 0.0 62 | }, 63 | "event 1": { 64 | "comments": " ", 65 | "distance": "0.000", 66 | "refl loss": "-50.000", 67 | "slope": "0.000", 68 | "splice loss": "0.000", 69 | "type": "1F9999LS {auto} reflection" 70 | }, 71 | "event 2": { 72 | "comments": " ", 73 | "distance": "12.711", 74 | "refl loss": "0.000", 75 | "slope": "0.344", 76 | "splice loss": "0.209", 77 | "type": "0F9999LS {auto} loss/drop/gain" 78 | }, 79 | "event 3": { 80 | "comments": " ", 81 | "distance": "25.351", 82 | "refl loss": "-51.514", 83 | "slope": "0.342", 84 | "splice loss": "0.087", 85 | "type": "1F9999LS {auto} reflection" 86 | }, 87 | "event 4": { 88 | "comments": " ", 89 | "distance": "38.047", 90 | "refl loss": "0.000", 91 | "slope": "0.344", 92 | "splice loss": "0.149", 93 | "type": "0F9999LS {auto} loss/drop/gain" 94 | }, 95 | "event 5": { 96 | "comments": " ", 97 | "distance": "50.728", 98 | "refl loss": "-16.726", 99 | "slope": "0.344", 100 | "splice loss": "13.232", 101 | "type": "1E9999LS {auto} reflection" 102 | }, 103 | "num events": 5 104 | }, 105 | "SupParams": { 106 | "OTDR": "E6000A ", 107 | "OTDR S/N": "3617G00108 ", 108 | "module": "E6008A ", 109 | "module S/N": "DE37300051 ", 110 | "other": "A3717-00051\n28.01.98 ", 111 | "software": "3.0", 112 | "supplier": "Hewlett Packard" 113 | }, 114 | "blocks": { 115 | "Cksum": { 116 | "name": "Cksum", 117 | "order": 8, 118 | "pos": 25706, 119 | "size": 2, 120 | "version": "1.00" 121 | }, 122 | "DataPts": { 123 | "name": "DataPts", 124 | "order": 3, 125 | "pos": 328, 126 | "size": 23564, 127 | "version": "1.01" 128 | }, 129 | "FxdParams": { 130 | "name": "FxdParams", 131 | "order": 2, 132 | "pos": 274, 133 | "size": 54, 134 | "version": "1.01" 135 | }, 136 | "GenParams": { 137 | "name": "GenParams", 138 | "order": 0, 139 | "pos": 148, 140 | "size": 44, 141 | "version": "1.01" 142 | }, 143 
| "HPEvent": { 144 | "name": "HPEvent", 145 | "order": 5, 146 | "pos": 24036, 147 | "size": 122, 148 | "version": "2.21" 149 | }, 150 | "HPSpecialInfo": { 151 | "name": "HPSpecialInfo", 152 | "order": 7, 153 | "pos": 24200, 154 | "size": 1506, 155 | "version": "2.22" 156 | }, 157 | "KeyEvents": { 158 | "name": "KeyEvents", 159 | "order": 4, 160 | "pos": 23892, 161 | "size": 144, 162 | "version": "1.01" 163 | }, 164 | "SupParams": { 165 | "name": "SupParams", 166 | "order": 1, 167 | "pos": 192, 168 | "size": 82, 169 | "version": "1.01" 170 | }, 171 | "Threshold": { 172 | "name": "Threshold", 173 | "order": 6, 174 | "pos": 24158, 175 | "size": 42, 176 | "version": "1.00" 177 | } 178 | }, 179 | "filename": "demo_ab.sor", 180 | "format": 1, 181 | "mapblock": { 182 | "nblocks": 9, 183 | "nbytes": 148 184 | }, 185 | "version": "1.00" 186 | } 187 | -------------------------------------------------------------------------------- /data/sample1310_lowDR-dump.json: -------------------------------------------------------------------------------- 1 | { 2 | "Cksum": { 3 | "checksum": 59892, 4 | "checksum_ours": 62998, 5 | "match": false 6 | }, 7 | "DataPts": { 8 | "_datapts_params": { 9 | "offset": "STV", 10 | "xscaling": 1 11 | }, 12 | "max before offset": 63.611, 13 | "min before offset": 6.566, 14 | "num data points": 15736, 15 | "num data points 2": 15736, 16 | "num traces": 1, 17 | "scaling factor": 1.0 18 | }, 19 | "FxdParams": { 20 | "BC": "-80.00 dB", 21 | "EOT thr": "3.000 dB", 22 | "X1": 0, 23 | "X2": 0, 24 | "Y1": 0, 25 | "Y2": 0, 26 | "acquisition offset": -367, 27 | "acquisition offset distance": 0, 28 | "acquisition range distance": 0, 29 | "averaging time": "15 sec", 30 | "date/time": "Tue Nov 22 08:49:23 2011 (1321951763 sec)", 31 | "front panel offset": 0, 32 | "index": "1.475000", 33 | "loss thr": "0.200 dB", 34 | "noise floor level": 2340, 35 | "noise floor scaling factor": 100, 36 | "num averages": 16380, 37 | "num data points": 15736, 38 | "number of pulse 
width entries": 1, 39 | "power offset first point": 0, 40 | "pulse width": "1000 ns", 41 | "range": 79.958173424989, 42 | "refl thr": "-40.000 dB", 43 | "resolution": 5.081226069203674, 44 | "sample spacing": "0.02499999 usec", 45 | "trace type": "ST[standard trace]", 46 | "unit": "km (kilometers)", 47 | "wavelength": "1310.0 nm" 48 | }, 49 | "GenParams": { 50 | "build condition": "BC (as-built)", 51 | "cable ID": " ", 52 | "cable code/fiber type": " ", 53 | "comments": " ", 54 | "fiber ID": " ", 55 | "fiber type": "G.652 (standard SMF)", 56 | "language": "EN", 57 | "location A": " ", 58 | "location B": " ", 59 | "operator": " ", 60 | "user offset": "0", 61 | "user offset distance": "0", 62 | "wavelength": "1310 nm" 63 | }, 64 | "KeyEvents": { 65 | "Summary": { 66 | "ORL": 32.392, 67 | "ORL finish": 17.065447, 68 | "ORL start": -0.007459, 69 | "loss end": 17.065447, 70 | "loss start": -0.007459, 71 | "total loss": 6.39 72 | }, 73 | "event 1": { 74 | "comments": "", 75 | "distance": "0.000", 76 | "end of curr": "0.308", 77 | "end of prev": "0.000", 78 | "peak": "0.038", 79 | "refl loss": "-44.177", 80 | "slope": "0.000", 81 | "splice loss": "0.000", 82 | "start of curr": "0.000", 83 | "start of next": "2.020", 84 | "type": "0F9999LS {auto} loss/drop/gain" 85 | }, 86 | "event 2": { 87 | "comments": "", 88 | "distance": "2.020", 89 | "end of curr": "2.655", 90 | "end of prev": "0.308", 91 | "peak": "2.040", 92 | "refl loss": "-40.574", 93 | "slope": "0.334", 94 | "splice loss": "0.557", 95 | "start of curr": "2.020", 96 | "start of next": "17.065", 97 | "type": "0F9999LS {auto} loss/drop/gain" 98 | }, 99 | "event 3": { 100 | "comments": "", 101 | "distance": "17.065", 102 | "end of curr": "79.945", 103 | "end of prev": "2.655", 104 | "peak": "17.081", 105 | "refl loss": "-38.395", 106 | "slope": "0.343", 107 | "splice loss": "22.820", 108 | "start of curr": "17.065", 109 | "start of next": "79.945", 110 | "type": "1E9999LS {auto} reflection" 111 | }, 112 | "num 
events": 3 113 | }, 114 | "SupParams": { 115 | "OTDR": "OPXOTDR ", 116 | "OTDR S/N": "000", 117 | "module": "SM/1310/1550", 118 | "module S/N": "09811", 119 | "other": "v1.1[670]", 120 | "software": "v9.09 VA=110105", 121 | "supplier": "OptixS" 122 | }, 123 | "blocks": { 124 | "Cksum": { 125 | "name": "Cksum", 126 | "order": 8, 127 | "pos": 32125, 128 | "size": 8, 129 | "version": "2.00" 130 | }, 131 | "DataPts": { 132 | "name": "DataPts", 133 | "order": 4, 134 | "pos": 520, 135 | "size": 31492, 136 | "version": "2.00" 137 | }, 138 | "EmbData": { 139 | "name": "EmbData", 140 | "order": 7, 141 | "pos": 32115, 142 | "size": 10, 143 | "version": "2.00" 144 | }, 145 | "FxdParams": { 146 | "name": "FxdParams", 147 | "order": 2, 148 | "pos": 265, 149 | "size": 92, 150 | "version": "2.00" 151 | }, 152 | "GenParams": { 153 | "name": "GenParams", 154 | "order": 0, 155 | "pos": 148, 156 | "size": 40, 157 | "version": "2.00" 158 | }, 159 | "IITEvents": { 160 | "name": "IITEvents", 161 | "order": 5, 162 | "pos": 32012, 163 | "size": 12, 164 | "version": "2.01" 165 | }, 166 | "IITParams": { 167 | "name": "IITParams", 168 | "order": 6, 169 | "pos": 32024, 170 | "size": 91, 171 | "version": "2.10" 172 | }, 173 | "KeyEvents": { 174 | "name": "KeyEvents", 175 | "order": 3, 176 | "pos": 357, 177 | "size": 163, 178 | "version": "2.00" 179 | }, 180 | "SupParams": { 181 | "name": "SupParams", 182 | "order": 1, 183 | "pos": 188, 184 | "size": 77, 185 | "version": "2.00" 186 | } 187 | }, 188 | "filename": "sample1310_lowDR.sor", 189 | "format": 2, 190 | "mapblock": { 191 | "nblocks": 9, 192 | "nbytes": 148 193 | }, 194 | "version": "2.00" 195 | } 196 | -------------------------------------------------------------------------------- /pyotdr/fxdparams.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from datetime import datetime, timezone 3 | import logging 4 | from . 
import parts

logger = logging.getLogger(__name__)

sep = " :"

# two-character distance-unit codes found in the SOR file, mapped to a
# human-readable suffix
unit_map = {
    "mt": " (meters)",
    "km": " (kilometers)",
    "mi": " (miles)",
    "kf": " (kilo-ft)",
}

# trace-type codes (present in format 2 files only)
tracetype = {
    "ST": "[standard trace]",
    "RT": "[reverse trace]",
    "DT": "[difference trace]",
    "RF": "[reference]",
}

# Field tables for the two SOR format versions.  Each row is:
#   (name, start-pos, length in bytes, display type, multiplier, precision, units)
# Display type is 'v' (unsigned value), 'i' (signed value), 'h' (hex string),
# or 's' (raw string).  start-pos is documentation only: fields are read
# sequentially from the block start.  An empty string means "not applicable".
_PLIST_V1 = (
    ("date/time", 0, 4, "v", "", "", ""),  # 0-3 seconds in Unix time
    ("unit", 4, 2, "s", "", "", ""),  # 4-5 distance units, 2 char (km,mt,ft,kf,mi)
    ("wavelength", 6, 2, "v", 0.1, 1, "nm"),  # 6-7 wavelength (nm)
    # from Andrew Jones
    ("acquisition offset", 8, 4, "i", "", "", ""),  # 8-11 acquisition offset; units?
    ("number of pulse width entries", 12, 2, "v", "", "", ""),  # 12-13
    ("pulse width", 14, 2, "v", "", 0, "ns"),  # 14-15 pulse width (ns)
    ("sample spacing", 16, 4, "v", 1e-8, "", "usec"),  # 16-19 sample spacing (usec)
    ("num data points", 20, 4, "v", "", "", ""),  # 20-23 number of data points
    ("index", 24, 4, "v", 1e-5, 6, ""),  # 24-27 index of refraction
    ("BC", 28, 2, "v", -0.1, 2, "dB"),  # 28-29 backscattering coeff
    ("num averages", 30, 4, "v", "", "", ""),  # 30-33 number of averages
    ("range", 34, 4, "v", 2e-5, 6, "km"),  # 34-37 range (km)
    # from Andrew Jones
    ("front panel offset", 38, 4, "i", "", "", ""),  # 38-41
    ("noise floor level", 42, 2, "v", "", "", ""),  # 42-43 unsigned
    ("noise floor scaling factor", 44, 2, "i", "", "", ""),  # 44-45
    ("power offset first point", 46, 2, "v", "", "", ""),  # 46-47 unsigned
    ("loss thr", 48, 2, "v", 0.001, 3, "dB"),  # 48-49 loss threshold
    ("refl thr", 50, 2, "v", -0.001, 3, "dB"),  # 50-51 reflection threshold
    ("EOT thr", 52, 2, "v", 0.001, 3, "dB"),  # 52-53 end-of-transmission threshold
)

_PLIST_V2 = (
    ("date/time", 0, 4, "v", "", "", ""),  # 0-3 seconds in Unix time
    ("unit", 4, 2, "s", "", "", ""),  # 4-5 distance units, 2 char (km,mt,ft,kf,mi)
    ("wavelength", 6, 2, "v", 0.1, 1, "nm"),  # 6-7 wavelength (nm)
    # from Andrew Jones
    ("acquisition offset", 8, 4, "i", "", "", ""),  # 8-11 acquisition offset; units?
    ("acquisition offset distance", 12, 4, "i", "", "", ""),  # 12-15; units?
    ("number of pulse width entries", 16, 2, "v", "", "", ""),  # 16-17
    ("pulse width", 18, 2, "v", "", 0, "ns"),  # 18-19 pulse width (ns)
    ("sample spacing", 20, 4, "v", 1e-8, "", "usec"),  # 20-23 sample spacing (usec)
    ("num data points", 24, 4, "v", "", "", ""),  # 24-27 number of data points
    ("index", 28, 4, "v", 1e-5, 6, ""),  # 28-31 index of refraction
    ("BC", 32, 2, "v", -0.1, 2, "dB"),  # 32-33 backscattering coeff
    ("num averages", 34, 4, "v", "", "", ""),  # 34-37 number of averages
    # from Dmitry Vaygant:
    ("averaging time", 38, 2, "v", 0.1, 0, "sec"),  # 38-39 averaging time (sec)
    ("range", 40, 4, "v", 2e-5, 6, "km"),  # 40-43 range (km); note x2
    # from Andrew Jones
    ("acquisition range distance", 44, 4, "i", "", "", ""),  # 44-47
    ("front panel offset", 48, 4, "i", "", "", ""),  # 48-51
    ("noise floor level", 52, 2, "v", "", "", ""),  # 52-53 unsigned
    ("noise floor scaling factor", 54, 2, "i", "", "", ""),  # 54-55
    ("power offset first point", 56, 2, "v", "", "", ""),  # 56-57 unsigned
    ("loss thr", 58, 2, "v", 0.001, 3, "dB"),  # 58-59 loss threshold
    ("refl thr", 60, 2, "v", -0.001, 3, "dB"),  # 60-61 reflection threshold
    ("EOT thr", 62, 2, "v", 0.001, 3, "dB"),  # 62-63 end-of-transmission threshold
    ("trace type", 64, 2, "s", "", "", ""),  # 64-65 trace type (ST,RT,DT, or RF)
    # from Andrew Jones
    ("X1", 66, 4, "i", "", "", ""),  # 66-69
    ("Y1", 70, 4, "i", "", "", ""),  # 70-73
    ("X2", 74, 4, "i", "", "", ""),  # 74-77
    ("Y2", 78, 4, "i", "", "", ""),  # 78-81
)


def process(fh, results):
    """
    Parse the FxdParams (fixed parameters) block of a SOR file.

    fh: binary file handle; will be repositioned to the block start.
    results: dict of parsed results; results["blocks"]["FxdParams"] must
        already hold the block position/size and results["format"] the
        SOR format version (1 or 2) — i.e. mapblock.process() has run.

    Returns "ok" on success, "nok" on failure.  On success,
    results["FxdParams"] is populated with the decoded fields.
    """
    bname = "FxdParams"
    hsize = len(bname) + 1  # include trailing '\0'
    pname = "FxdParams.process():"
    status = "nok"

    try:
        ref = results["blocks"][bname]
        startpos = ref["pos"]
        fh.seek(startpos)
    except (KeyError, TypeError):
        # narrow exceptions: the original bare "except:" hid real errors
        logger.error("{} {} block starting position unknown".format(pname, bname))
        return status

    # NOTE: renamed local from "format" to avoid shadowing the builtin
    sor_format = results["format"]

    if sor_format == 2:
        # format 2 blocks begin with their own NUL-terminated name as a header
        mystr = fh.read(hsize).decode("ascii")
        if mystr != bname + "\0":
            logger.error("{} incorrect header {}".format(pname, mystr))
            return status

    results[bname] = dict()

    plist = _PLIST_V1 if sor_format == 1 else _PLIST_V2

    status = _process_fields(fh, plist, results)

    # read the rest of the block (just in case), so the file position
    # lands on the next block; propagate the field-processing status
    # instead of unconditionally overwriting it with "ok"
    endpos = results["blocks"][bname]["pos"] + results["blocks"][bname]["size"]
    fh.read(endpos - fh.tell())
    return status


# ================================================================
def _process_fields(fh, plist, results):
    """
    Read each field described by plist from fh and store the formatted
    value in results["FxdParams"].

    Decoder per display type:
        'h': parts.get_hex    (hex string)
        'v': parts.get_uint   (unsigned, optionally scaled/formatted)
        's': raw bytes decoded as UTF-8
        'i': parts.get_signed (signed integer)

    Also derives "range" and "resolution" from the raw sample spacing
    and the index of refraction.  Returns "ok".
    """
    bname = "FxdParams"
    xref = results[bname]

    for count, field in enumerate(plist):
        # start-pos (field[1]) is documentation only; fields are sequential
        name, _, fsize, ftype, scale, dgt, unit = field
        xstr = ""
        val = None

        if ftype == "i":
            val = parts.get_signed(fh, fsize)
            xstr = val
        elif ftype == "v":
            val = parts.get_uint(fh, fsize)
            if scale != "":
                val *= scale
            if dgt != "":
                xstr = ("%%.%df" % dgt) % val
            else:
                xstr = val
        elif ftype == "h":
            xstr = parts.get_hex(fh, fsize)
        elif ftype == "s":
            xstr = fh.read(fsize).decode("utf-8")
        else:
            val = fh.read(fsize)
            xstr = val

        # ................. special-case formatting .................
        if name == "date/time":
            # val is seconds in Unix time; rendered in UTC
            xstr = datetime.fromtimestamp(val, timezone.utc).strftime(
                "%a %b %d %H:%M:%S %Y"
            ) + (" (%d sec)" % val)
        elif name == "unit":
            xstr += unit_map[xstr]
        elif name == "trace type":
            try:
                xstr += tracetype[xstr]
            except KeyError:
                # unknown code: keep the raw two-character value
                pass

        # don't bother even trying if there are multiple pulse width entries;
        # the sequential field layout above cannot handle that case
        if name == "number of pulse width entries" and val > 1:
            logger.warning(
                "Cannot handle multiple pulse width entries ({}); aborting".format(val)
            )
            # TODO should raise an exception instead of brutally exiting;
            # kept as sys.exit() to preserve existing caller-visible behavior
            sys.exit()

        # .................
        logger.debug("%s %d. %s: %s %s" % (sep, count, name, xstr, unit))

        xref[name] = xstr if unit == "" else str(xstr) + " " + unit

    # corrections/adjustment: recompute range and resolution from the raw
    # sample spacing, speed of light, and index of refraction
    ior = float(xref["index"])
    ss = xref["sample spacing"].split(" ")[0]
    dx = float(ss) * parts.sol / ior
    xref["range"] = dx * int(xref["num data points"])
    xref["resolution"] = dx * 1000.0  # in meters

    logger.debug("%s [adjusted for refractive index]" % (sep))
    logger.debug("%s resolution = %.14f m" % (sep, xref["resolution"]))
    logger.debug("%s range = %.13f km" % (sep, xref["range"]))

    return "ok"
12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. 
For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 
83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 
117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 
146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 
216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 
246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. 
If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 
309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 
336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 
360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. 
If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 
428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 
486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. 
If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 
578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 
613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | {one line to give the program's name and a brief idea of what it does.} 635 | Copyright (C) {year} {name of author} 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | {project} Copyright (C) {year} {fullname} 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | --------------------------------------------------------------------------------