├── .gitignore ├── src ├── commons.l10n.csv ├── python │ └── noderunner.py └── commons.coffee ├── tools ├── Makefile ├── json-request ├── tmpl │ └── easydb-server.yml.tmpl ├── jsondiff.py ├── unimport_scss.py ├── solution-plugins.make ├── base-plugins.make ├── l10n2json.py ├── analyze-mask.py ├── upgrade_boost_debian9.sh ├── easydb-5-macros.inc ├── easydb-5-macros.inc.proxy ├── merge_loca_csv.py └── easy5-dev.init ├── README.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | *.coffee.js 2 | .sass-cache 3 | build 4 | build-stamp-* 5 | *~ 6 | \#*\# 7 | \.#* 8 | commit.txt 9 | .idea 10 | *.pyc 11 | 12 | -------------------------------------------------------------------------------- /src/commons.l10n.csv: -------------------------------------------------------------------------------- 1 | key,de-DE,en-US 2 | custom.data.type.commons.popover.choose.label,Auswahl treffen,choose 3 | custom.data.type.commons.modal.form.popup.loadingstring,Lade Informationen,loading 4 | custom.data.type.commons.controls.search.label,Suchen,Search 5 | custom.data.type.commons.controls.detailinfo.label,Detailinfo, Details 6 | custom.data.type.commons.controls.calluri.label,Uri aufrufen,Call URI 7 | custom.data.type.commons.controls.delete.label,Löschen,Delete 8 | -------------------------------------------------------------------------------- /tools/Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | @echo install tools: 3 | @echo " make install-home # install to $$HOME/bin" 4 | @echo " make install-usrlocal # install to /usr/local/bin" 5 | @echo " make install PREFIX=/somewhere # install to /somewhere/bin" 6 | 7 | TOOLS_TO_INSTALL = \ 8 | l10n2json.py \ 9 | unimport_scss.py 10 | 11 | install: 12 | [ ! 
-z "${PREFIX}" ] # ensure PREFIX is set 13 | mkdir -p "${PREFIX}/bin" 14 | for tool in ${TOOLS_TO_INSTALL}; do \ 15 | install -m 755 "$$tool" "${PREFIX}/bin/easydb-$$tool" ; \ 16 | done 17 | 18 | install-home: 19 | $(MAKE) install PREFIX="${HOME}" 20 | 21 | install-usrlocal: 22 | $(MAKE) install PREFIX=/usr/local 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # easydb-library 2 | Library to support Plugin Development for easydb 3 | 4 | ## Instead of: 5 | ``` 6 | class CustomDataTypePLUGINNAME extends CustomDataType 7 | ``` 8 | 9 | ## Use: 10 | ``` 11 | class CustomDataTypePLUGINNAME extends CustomDataTypeWithCommons 12 | ``` 13 | ### And 14 | * Remove the methods, which shell be used from *commons.coffee* from Plugincode 15 | * Add the *commons.coffee* to your Makefile and prepend to your Pluginsource 16 | * Rename the Autocompletion-Searchbar to "searchbarInput" 17 | * CSS: 18 | * Class of Popover: commonPlugin_Popover 19 | * Class of Selects in Popover: commonPlugin_Select 20 | * Class of Inputs in Popover: commonPlugin_Input 21 | 22 | -------------------------------------------------------------------------------- /tools/json-request: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import json, os, shlex, sys 4 | from optparse import OptionParser 5 | 6 | parser = OptionParser(usage='usage: %prog [options] [file]') 7 | parser.add_option('-e', '--exec', 8 | dest='exec', action="store_true", default=False, 9 | help='execute request with curl after parsing') 10 | 11 | (options, args) = parser.parse_args() 12 | 13 | f = sys.stdin 14 | if len(args) > 0: 15 | f = open(args[0], 'r') 16 | 17 | p = json.load(f) 18 | if "request" in p: 19 | req = p["request"] 20 | else: 21 | req = p 22 | 23 | url = req["url"] 24 | body = req.get('body') 25 | headers = req.get('headers', 
{}) 26 | method = req.get('method', 'GET') 27 | 28 | cmd = [ 29 | "curl", 30 | "-X", method, 31 | ] 32 | 33 | for k, v in headers.items(): 34 | if v is not None: 35 | cmd.extend(['-H', "{}: {}".format(k, v)]) 36 | 37 | cmd.append(url) 38 | if body and len(body): 39 | 40 | cmd.extend(['--data-binary', body]) 41 | 42 | cmdline = " ".join(map(shlex.quote, cmd)) 43 | 44 | if options.exec: 45 | os.system(cmdline) 46 | else: 47 | print(cmdline) 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Programmfabrik GmbH 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /tools/tmpl/easydb-server.yml.tmpl: -------------------------------------------------------------------------------- 1 | %YAML 1.2 2 | 3 | include_before: 4 | - ../../base/base.yml 5 | - ../../../plugins/base-plugins.yml 6 | 7 | solution: 8 | name: $SOLUTION_NAME 9 | 10 | schema: 11 | dsn: port=$DB_PORT user=$DB_USER dbname=$DB_NAME 12 | user_dir: $USER_DIR 13 | 14 | logging: 15 | pf: $LOGLEVEL 16 | 17 | server: 18 | external_url: $SERVER_EXTERNAL_URL 19 | directory: 20 | pflib: $PFLIB_DIR 21 | imexporter: $IMEXPORTER_DIR 22 | output: $OUTPUT_DIR 23 | logfile: $LOG_FILE 24 | server_errors: $SERVER_ERROR_DIR 25 | elasticsearch: $ELASTIC_SEARCH_DIR 26 | exporter: 27 | num_workers: 1 28 | imexporter: 29 | num_services: 0 30 | socket: $IMEXPORTER_SOCKET 31 | frontend: 32 | slow: 33 | num_services: 1 34 | socket: $FRONTEND_SLOW_SOCKET 35 | medium: 36 | num_services: 1 37 | socket: $FRONTEND_MEDIUM_SOCKET 38 | fast: 39 | num_services: 1 40 | socket: $FRONTEND_FAST_SOCKET 41 | upload: 42 | num_services: 1 43 | socket: $UPLOAD_SOCKET 44 | indexer: 45 | num_processes: 1 46 | 47 | 48 | eas: 49 | instance: $EAS_INSTANCE 50 | url: $EAS_URL 51 | external_url: $EAS_EXTERNAL_URL 52 | 53 | elasticsearch: 54 | url: $ELASTIC_SEARCH_URL 55 | 56 | # vim:set ts=2 et ft=yaml: 57 | -------------------------------------------------------------------------------- /tools/jsondiff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import json 5 | 6 | if len(sys.argv) < 3: 7 | print ('wrong arguments') 8 | exit(1) 9 | 10 | mappings = [] 11 | for i in [1,2]: 12 | with open(sys.argv[i]) as f: 13 | js = json.load(f) 14 | mappings.append(js[list(js.keys())[0]]) 15 | 16 | def jsonerror(path, error): 17 | print('[{}] {}'.format('.'.join(path), error)) 18 | 19 | def jsondiff(old, new, path=[]): 20 | if isinstance(old, dict): 21 | if not 
isinstance(new, dict): 22 | jsonerror(path, 'expecting object') 23 | return 24 | for k in old.keys(): 25 | if k not in new: 26 | jsonerror(path + [k], 'not found') 27 | continue 28 | jsondiff(old[k], new[k], path + [k]) 29 | elif isinstance(old, list): 30 | if not isinstance(new, list): 31 | jsonerror(path, 'expecting list') 32 | return 33 | if len(old) != len(new): 34 | jsonerror(path, 'list length {} != {}'.format(len(old), len(new))) 35 | return 36 | for i in range(len(new)): 37 | jsondiff(old[i], new[i], path + [str(i)]) 38 | elif old != new: 39 | jsonerror(path, 'expecting {}, got {}'.format(old, new)) 40 | 41 | jsondiff(*mappings) 42 | -------------------------------------------------------------------------------- /tools/unimport_scss.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os, sys, re 4 | 5 | def import_file(filename, out, parse_import_re, container = '
', base_path = '.'): 6 | this_abs = os.path.abspath(os.path.join(base_path, filename)) 7 | 8 | next_base_path = os.path.join(base_path, os.path.dirname(filename)) 9 | try: 10 | with open(os.path.join(base_path, filename)) as f: 11 | for line in f.readlines(): 12 | if line.startswith("@import"): 13 | m = parse_import_re.match(line) 14 | if not m: 15 | if not line.startswith("@import url("): 16 | sys.stderr.write("failed to parse import line: %s" % line) 17 | out.write("/* failed to resolve import line, include : */\n") 18 | out.write(line) 19 | else: 20 | next_abs = os.path.abspath(os.path.join(next_base_path, m.group(1))) 21 | 22 | #print(' "%s" -> "%s";' % (this_abs, next_abs)) 23 | 24 | out.write("/* import %s */\n" % filename) 25 | import_file(m.group(1), out, parse_import_re, filename, next_base_path) 26 | else: 27 | out.write(line) 28 | except IOError as e: 29 | sys.stderr.write("failed to include %s (in %s): %s" % ( 30 | os.path.join(base_path, filename), filename, e)) 31 | out.write("/* failed to include %s (required in %s) */\n" % (filename, container)) 32 | 33 | if len(sys.argv) != 3: 34 | sys.stderr.write("usage: %s \n" % sys.argv[0]) 35 | sys.exit(1) 36 | 37 | parse_import_re = re.compile('^@import\s+["\']([^"\']*)["\'];.*$') 38 | with open(sys.argv[2], "w") as out: 39 | import_file(sys.argv[1], out, parse_import_re) 40 | -------------------------------------------------------------------------------- /tools/solution-plugins.make: -------------------------------------------------------------------------------- 1 | WEB = build/webfrontend 2 | # L10N2JSON := easydb-l10n2json.py 3 | SELF_DIR := $(dir $(lastword $(MAKEFILE_LIST))) 4 | 5 | L10N2JSON = python2 $(SELF_DIR)/l10n2json.py 6 | 7 | JS ?= $(WEB)/${PLUGIN_NAME}.js 8 | L10N = build-stamp-l10n 9 | JS_FILES ?= 10 | SCSS_FILES ?= 11 | 12 | CSS ?= $(WEB)/${PLUGIN_NAME}.scss 13 | 14 | PLUGIN_PATH ?= $(PLUGIN_NAME) 15 | 16 | export SASS_PATH=. 
17 | scss_call = sass 18 | 19 | css: $(CSS) 20 | 21 | $(CSS): $(SCSS_FILES) 22 | mkdir -p $(dir $@) 23 | cat $(SCSS_FILES) | $(scss_call) --stdin > $(CSS) 24 | 25 | ${JS}: $(subst .coffee,.coffee.js,${COFFEE_FILES}) $(JS_FILES) 26 | mkdir -p $(dir $@) 27 | cat $^ > $@ 28 | 29 | ${SCSS}: ${SCSS_FILES} 30 | mkdir -p $(dir $@) 31 | cat $^ > $@ 32 | 33 | build-stamp-l10n: $(CULTURES_CSV) $(L10N_FILES) 34 | mkdir -p $(WEB)/l10n 35 | $(L10N2JSON) $(CULTURES_CSV) $(L10N_FILES) $(WEB)/l10n 36 | touch $@ 37 | 38 | %.coffee.js: %.coffee 39 | coffee -b -p --compile "$^" > "$@" || ( rm -f "$@" ; false ) 40 | 41 | $(WEB)/%: src/webfrontend/% 42 | mkdir -p $(dir $@) 43 | cp $^ $@ 44 | 45 | install: 46 | 47 | uninstall: 48 | 49 | install-solution: ${INSTALL_FILES} 50 | [ ! -z "${INSTALL_PREFIX}" ] 51 | mkdir -p ${INSTALL_PREFIX}/solution-${SOLUTION}/solutions/${SOLUTION}/plugins/${PLUGIN_PATH} 52 | for f in ${INSTALL_FILES}; do \ 53 | mkdir -p ${INSTALL_PREFIX}/solution-${SOLUTION}/solutions/${SOLUTION}/plugins/${PLUGIN_PATH}/`dirname $$f`; \ 54 | if [ -d "$$f" ]; then \ 55 | cp -Pr $$f ${INSTALL_PREFIX}/solution-${SOLUTION}/solutions/${SOLUTION}/plugins/${PLUGIN_PATH}/`dirname $$f`; \ 56 | else \ 57 | cp $$f ${INSTALL_PREFIX}/solution-${SOLUTION}/solutions/${SOLUTION}/plugins/${PLUGIN_PATH}/$$f; \ 58 | fi; \ 59 | done 60 | 61 | google_csv: 62 | chmod u+w $(L10N_FILES) 63 | curl --silent -L -o - "https://docs.google.com/spreadsheets/u/1/d/$(L10N_GOOGLE_KEY)/export?format=csv&id=$(L10N_GOOGLE_KEY)&gid=$(L10N_GOOGLE_GID)" | sed -e 's/[[:space:]]*$$//' > $(L10N_FILES) 64 | chmod a-w $(L10N_FILES) 65 | $(MAKE) build-stamp-l10n 66 | 67 | clean-base: 68 | rm -f $(L10N) $(subst .coffee,.coffee.js,${COFFEE_FILES}) $(JS) $(SCSS) 69 | 70 | .PHONY: all build clean clean-base code install uninstall install-server google_csv 71 | 72 | # vim:set ft=make: 73 | -------------------------------------------------------------------------------- /src/python/noderunner.py: 
-------------------------------------------------------------------------------- 1 | # coding=utf8 2 | 3 | import os 4 | import sys 5 | from subprocess import Popen, PIPE 6 | 7 | 8 | def call(config, script, parameters='', additional_nodepaths=[], logger=None): 9 | 10 | node_runner_binary, node_runner_app, node_paths = get_paths(config) 11 | if node_runner_binary is None: 12 | raise Exception('node_runner_binary_not_found') 13 | if node_runner_app is None: 14 | raise Exception('node_runner_app_not_found') 15 | 16 | command = node_runner_binary.split(' ') + [node_runner_app, script, '-'] 17 | 18 | node_paths += additional_nodepaths 19 | node_env = { 20 | 'NODE_PATH': ':'.join([os.path.abspath(n) for n in node_paths]) 21 | } 22 | 23 | if logger is not None: 24 | logger.debug('noderunner call: %s' % ' '.join(command)) 25 | logger.debug('noderunner stdin: %s' % parameters) 26 | logger.debug('noderunner environment: %s' % node_env) 27 | 28 | p1 = Popen( 29 | command, 30 | shell=False, 31 | stdin=PIPE, 32 | stdout=PIPE, 33 | stderr=PIPE, 34 | env=node_env 35 | ) 36 | 37 | out, err = p1.communicate(input=parameters) 38 | exit_code = p1.returncode 39 | 40 | if logger is not None: 41 | logger.debug('noderunner call: %s bytes from stdout, %s bytes from stderr, exit code: %s ==> %s' 42 | % (len(out), len(err), exit_code, 'OK' if exit_code == 0 else 'ERROR')) 43 | if (exit_code != 0): 44 | logger.error('noderunner call: exit code: %s, error: %s, out: %s' % (exit_code, err, out)) 45 | 46 | return unicode(out, encoding='utf-8'), unicode(err, encoding='utf-8'), exit_code 47 | 48 | 49 | def get_paths(config): 50 | 51 | if not 'system' in config or not 'nodejs' in config['system']: 52 | return None, None, None 53 | 54 | for k in ['node_runner_binary', 'node_runner_app', 'node_modules']: 55 | if k not in config['system']['nodejs']: 56 | return None, None, None 57 | 58 | node_runner_binary = config['system']['nodejs']['node_runner_binary'] 59 | if node_runner_binary is None: 60 | 
return None, None, None 61 | 62 | node_runner_binary = os.path.abspath(node_runner_binary) 63 | 64 | node_runner_app = config['system']['nodejs']['node_runner_app'] 65 | if node_runner_app is None: 66 | return None, None, None 67 | 68 | node_runner_app = os.path.abspath(node_runner_app) 69 | 70 | node_modules = config['system']['nodejs']['node_modules'] 71 | if node_modules is None: 72 | node_path = set() 73 | else: 74 | node_path = set(node_modules.split(':')) 75 | 76 | return node_runner_binary, node_runner_app, list(node_path) 77 | -------------------------------------------------------------------------------- /tools/base-plugins.make: -------------------------------------------------------------------------------- 1 | WEB = build/webfrontend 2 | WEBHOOKS = build/webhooks 3 | 4 | SELF_DIR := $(dir $(lastword $(MAKEFILE_LIST))) 5 | 6 | L10N2JSON = python2 $(SELF_DIR)/l10n2json.py 7 | 8 | JS ?= $(WEB)/${PLUGIN_NAME}.js 9 | WEBHOOK_NAME ?= ${PLUGIN_NAME} 10 | WEBHOOK_JS ?= ${WEBHOOKS}/${WEBHOOK_NAME}.js 11 | CSS ?= $(WEB)/${PLUGIN_NAME}.css 12 | L10N = build-stamp-l10n 13 | 14 | WEBFRONTEND_SASS ?= sass 15 | 16 | PLUGIN_PATH ?= $(PLUGIN_NAME) 17 | 18 | css: $(CSS) 19 | 20 | $(CSS): $(SCSS_FILES) 21 | mkdir -p $(dir $@) 22 | cat $(SCSS_FILES) | $(WEBFRONTEND_SASS) --stdin > $(CSS) || ( rm -f $(CSS) ; false ) 23 | 24 | ${JS}: $(subst .coffee,.coffee.js,${COFFEE_FILES}) 25 | mkdir -p $(dir $@) 26 | cat $^ > $@ 27 | 28 | ${WEBHOOK_JS}: $(subst .coffee,.coffee.js,${WEBHOOK_FILES}) 29 | mkdir -p $(dir $@) 30 | cat $^ > $@ 31 | 32 | build-stamp-l10n: $(L10N_FILES) 33 | mkdir -p $(WEB)/l10n 34 | $(L10N2JSON) $(L10N_FILES) $(WEB)/l10n 35 | touch $@ 36 | 37 | buildinfojson: 38 | repo=`git remote get-url origin | sed -e 's/\.git$$//' -e 's#.*[/\\]##'` ;\ 39 | rev=`git show --no-patch --format=%H` ;\ 40 | lastchanged=`git show --no-patch --format=%ad --date=format:%Y-%m-%dT%T%z` ;\ 41 | builddate=`date +"%Y-%m-%dT%T%z"` ;\ 42 | echo '{' > build-info.json ;\ 43 | echo ' 
"repository": "'$$repo'",' >> build-info.json ;\ 44 | echo ' "rev": "'$$rev'",' >> build-info.json ;\ 45 | echo ' "lastchanged": "'$$lastchanged'",' >> build-info.json ;\ 46 | echo ' "builddate": "'$$builddate'"' >> build-info.json ;\ 47 | echo '}' >> build-info.json 48 | 49 | %.coffee.js: %.coffee 50 | coffee -b -p --compile "$^" > "$@" || ( rm -f "$@" ; false ) 51 | 52 | $(WEB)/%: src/webfrontend/% 53 | mkdir -p $(dir $@) 54 | cp $^ $@ 55 | 56 | install: 57 | 58 | uninstall: 59 | 60 | google_csv: 61 | chmod u+w $(L10N_FILES) 62 | curl --silent -L -o - "https://docs.google.com/spreadsheets/u/1/d/$(L10N_GOOGLE_KEY)/export?format=csv&id=$(L10N_GOOGLE_KEY)&gid=$(L10N_GOOGLE_GID)" | sed -e 's/[[:space:]]*$$//' > $(L10N_FILES) 63 | chmod a-w $(L10N_FILES) 64 | $(MAKE) build-stamp-l10n 65 | 66 | 67 | install-server: ${INSTALL_FILES} 68 | [ ! -z "${INSTALL_PREFIX}" ] 69 | mkdir -p ${INSTALL_PREFIX}/server/base/plugins/${PLUGIN_PATH} 70 | for f in ${INSTALL_FILES}; do \ 71 | mkdir -p ${INSTALL_PREFIX}/server/base/plugins/${PLUGIN_PATH}/`dirname $$f`; \ 72 | if [ -d "$$f" ]; then \ 73 | cp -Pr $$f ${INSTALL_PREFIX}/server/base/plugins/${PLUGIN_PATH}/`dirname $$f`; \ 74 | else \ 75 | cp $$f ${INSTALL_PREFIX}/server/base/plugins/${PLUGIN_PATH}/$$f; \ 76 | fi; \ 77 | done 78 | if [ -f "build-info.json" ]; then \ 79 | cp "build-info.json" "${INSTALL_PREFIX}/server/base/plugins/${PLUGIN_PATH}/build-info.json"; \ 80 | fi 81 | 82 | clean-base: 83 | rm -f $(L10N) $(subst .coffee,.coffee.js,${COFFEE_FILES}) $(JS) $(SCSS) 84 | rm -f $(subst .coffee,.coffee.js,${WEBHOOK_FILES}) $(WEBHOOK_JS) 85 | rm -f $(WEB)/l10n/*.json 86 | rm -f build-stamp-l10n 87 | rm -rf build 88 | 89 | wipe-base: clean-base 90 | find . 
-name '*~' -or -name '#*#' | xargs rm -f 91 | 92 | .PHONY: all build buildinfojson clean clean-base wipe-base code install uninstall install-server google_csv 93 | 94 | # vim:set ft=make: 95 | -------------------------------------------------------------------------------- /tools/l10n2json.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | # coding=utf8 3 | import sys 4 | import csv 5 | import os 6 | import re 7 | import json 8 | 9 | # print 'Number of arguments:', len(sys.argv), 'arguments.' 10 | # print 'Argument List:', str(sys.argv) 11 | 12 | if len(sys.argv) < 3: 13 | print "Usage: l10n2json.py " 14 | 15 | directory = sys.argv[ len(sys.argv)-1 ] 16 | 17 | target_dict = {} 18 | 19 | if not os.path.exists(directory): 20 | print "Target-Directory does not exists:", directory 21 | exit(1) 22 | 23 | cultures = [] 24 | cultures_plain = [] 25 | 26 | EN_US_CULTURE = "en-US" 27 | 28 | def getCultureValue(_row, _culture, i = 0): 29 | _value = _row[_culture] 30 | if _value == None or _value == '': 31 | if i >= len(cultures_plain): 32 | return "" 33 | nextCulture = cultures_plain[i] 34 | i += 1 35 | return getCultureValue(_row, nextCulture, i) 36 | return _value.strip() 37 | 38 | 39 | for idx in range(1, len(sys.argv)-1): 40 | print "#"+str(idx), sys.argv[idx] 41 | 42 | with open(sys.argv[idx], 'rb') as csvfile: 43 | reader = csv.DictReader(csvfile, delimiter=',', quotechar='"') 44 | 45 | for culture in reader.fieldnames: 46 | if culture == "key": 47 | continue 48 | elif re.match("[a-z]{2}\-[A-Z]{2}", culture) == None: 49 | print "Omitting column \""+culture+"\", culture format \"az-AZ\" not matching." 
50 | else: 51 | cultures.append({"code": culture}) 52 | cultures_plain.append(culture) 53 | 54 | cultures_plain = sorted(cultures_plain, key=lambda item: 0 if item == EN_US_CULTURE else 1) 55 | 56 | line = 1 # the first line was skipped as it is contains the keys for the dict 57 | for row in reader: 58 | 59 | loca_key = row["key"].strip() 60 | if loca_key == None or loca_key == '': 61 | continue 62 | 63 | for culture in row.keys(): 64 | if culture not in cultures_plain: 65 | continue 66 | 67 | if not culture in target_dict: 68 | if culture == None: 69 | print 70 | print "WARNING: Line %s: Ignoring extra value: %s. Row:" % ((line+1), row[culture]), repr(row) 71 | continue 72 | 73 | target_dict[culture] = {} 74 | 75 | target_dict[culture][loca_key] = getCultureValue(row, culture) 76 | line = line + 1 77 | 78 | for culture, loca_keys in target_dict.iteritems(): 79 | # we omit columns which don't look like "culture" columns 80 | 81 | filename = directory+"/"+culture+".json" 82 | with open(filename, 'w') as outfile: 83 | dump_dict = {} 84 | dump_dict[culture] = target_dict[culture] 85 | json.dump(dump_dict, outfile, ensure_ascii=False, sort_keys=True, indent=4) 86 | print "Wrote", filename, "with", len(target_dict[culture].keys()), "loca keys." 
87 | 88 | filename = directory+"/cultures.json" 89 | with open(filename, 'w') as outfile: 90 | json.dump(cultures, outfile, ensure_ascii=False, sort_keys=True, indent=4) 91 | print "Wrote", filename, "with", repr(cultures_plain) 92 | -------------------------------------------------------------------------------- /tools/analyze-mask.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os, sys 4 | from lxml.etree import Element, ElementTree 5 | 6 | 7 | class XmlBase (object): 8 | nsmap = { 9 | 'ds': 'http://schema.programmfabrik.de/database-schema/0.1', 10 | 'es': 'http://schema.programmfabrik.de/easydb-database-schema/0.1', 11 | 'em': 'http://schema.programmfabrik.de/easydb-mask-schema/0.1', 12 | } 13 | 14 | class Searchable (XmlBase): 15 | def __init__(self, xml): 16 | self.search_expert = xml.find('em:search/em:expert', self.nsmap).attrib['enabled'] == '1' 17 | self.search_facet = xml.find('em:search/em:facet', self.nsmap).attrib['enabled'] == '1' 18 | self.search_fulltext = xml.find('em:search/em:fulltext', self.nsmap).attrib['enabled'] == '1' 19 | self.search_flags = \ 20 | (self.search_expert and 'E' or '') + \ 21 | (self.search_facet and 'F' or '') + \ 22 | (self.search_fulltext and 'V' or '') 23 | 24 | 25 | class Field (Searchable): 26 | def __init__(self, xml): 27 | super(Field, self).__init__(xml) 28 | self.name = xml.attrib.get('column-name-hint') 29 | 30 | class LinkedTable (Searchable): 31 | def __init__(self, xml): 32 | super(LinkedTable, self).__init__(xml) 33 | self.name = xml.attrib.get('other-table-hint') 34 | 35 | class ReverseLinkedTable (LinkedTable): 36 | def __init__(self, xml): 37 | super(ReverseLinkedTable, self).__init__(xml) 38 | 39 | class Analyzer (XmlBase): 40 | @classmethod 41 | def analyze_masks(cls, maskxmlfile, mask_name): 42 | tree = ElementTree() 43 | tree.parse(maskxmlfile) 44 | root = tree.getroot() 45 | 46 | if mask_name is not None: 47 | mask = 
root.find("em:mask[@name='{0}']".format(mask_name), cls.nsmap) 48 | if mask is None: 49 | sys.stderr.write("failed to find mask '{0}'\n".format(mask_name)) 50 | sys.exit(1) 51 | cls._analyze_mask(mask) 52 | else: 53 | for mask in root.findall('em:mask', cls.nsmap): 54 | cls._analyze_mask(mask) 55 | 56 | @classmethod 57 | def _analyze_mask(cls, mask, indent = ''): 58 | print("{0}M:{1}".format(indent, mask.get('name', ''))) 59 | for rlinkedxml in mask.findall('em:fields/em:reverse-linked-table', cls.nsmap): 60 | rlinked = ReverseLinkedTable(rlinkedxml) 61 | if len(rlinked.search_flags): 62 | print("{0} R:{1} ({2})".format(indent, rlinked.name, rlinked.search_flags)) 63 | maskxml = rlinkedxml.find('em:mask', cls.nsmap) 64 | if maskxml is not None: 65 | cls._analyze_mask(maskxml, indent + ' ') 66 | for linkedxml in mask.findall('em:fields/em:linked-table', cls.nsmap): 67 | linked = LinkedTable(linkedxml) 68 | if len(linked.search_flags): 69 | print("{0} N:{1} ({2})".format(indent, linked.name, linked.search_flags)) 70 | maskxml = linkedxml.find('em:mask', cls.nsmap) 71 | if maskxml is not None: 72 | cls._analyze_mask(maskxml, indent + ' ') 73 | for fieldxml in mask.findall('em:fields/em:field', cls.nsmap): 74 | field = Field(fieldxml) 75 | if len(field.search_flags): 76 | print("{0} F:{1} ({2})".format(indent, field.name, field.search_flags)) 77 | 78 | 79 | if __name__ == '__main__': 80 | if len(sys.argv) < 2: 81 | sys.stderr.write('usage: {0} []\n'.format(sys.argv[0])) 82 | sys.exit(1) 83 | if not os.path.isfile(sys.argv[1]): 84 | sys.stderr.write('failed to find {0}\n'.format(sys.argv[1])) 85 | sys.exit(1) 86 | Analyzer.analyze_masks(sys.argv[1], len(sys.argv) > 2 and sys.argv[2] or None) 87 | -------------------------------------------------------------------------------- /tools/upgrade_boost_debian9.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | l=/etc/apt/sources.list.d/stretch-backports.list 4 | echo 
"deb http://ftp.de.debian.org/debian stretch-backports main contrib non-free" > $l 5 | 6 | apt-get update 7 | 8 | cd /tmp 9 | e=boost-dev-dummy 10 | cat > $e << EOD 11 | Section: misc 12 | Priority: optional 13 | Standards-Version: 3.9.2 14 | Package: boost-dev-dummy 15 | Depends: libboost1.67-dev 16 | Provides: libboost-dev 17 | Description: libboost1.67-dev from backports does not provide libboost-dev dependency 18 | EOD 19 | 20 | equivs-build $e 21 | 22 | apt-get purge \ 23 | libboost-atomic-dev libboost-atomic1.62-dev \ 24 | libboost-chrono-dev libboost-chrono1.62-dev \ 25 | libboost-context-dev libboost-context1.62-dev \ 26 | libboost-coroutine-dev libboost-coroutine1.62-dev \ 27 | libboost-date-time-dev libboost-date-time1.62-dev \ 28 | libboost-exception-dev libboost-exception1.62-dev \ 29 | libboost-fiber-dev libboost-fiber1.62-dev \ 30 | libboost-filesystem-dev libboost-filesystem1.62-dev \ 31 | libboost-graph-parallel-dev libboost-graph-parallel1.62-dev \ 32 | libboost-graph-dev libboost-graph1.62-dev \ 33 | libboost-iostreams-dev libboost-iostreams1.62-dev \ 34 | libboost-locale-dev libboost-locale1.62-dev \ 35 | libboost-log-dev libboost-log1.62-dev \ 36 | libboost-math-dev libboost-math1.62-dev \ 37 | libboost-mpi-dev libboost-mpi-python1.62-dev \ 38 | libboost-mpi-python-dev libboost-mpi-python1.62.0 \ 39 | libboost-mpi-dev libboost-mpi1.62-dev \ 40 | libboost-program-options-dev libboost-program-options1.62-dev \ 41 | libboost-python-dev libboost-python1.62-dev \ 42 | libboost-random-dev libboost-random1.62-dev \ 43 | libboost-regex-dev libboost-regex1.62-dev \ 44 | libboost-serialization-dev libboost-serialization1.62-dev \ 45 | libboost-signals-dev libboost-signals1.62-dev \ 46 | libboost-system-dev libboost-system1.62-dev \ 47 | libboost-test-dev libboost-test1.62-dev \ 48 | libboost-thread-dev libboost-thread1.62-dev \ 49 | libboost-timer-dev libboost-timer1.62-dev \ 50 | libboost-type-erasure-dev libboost-type-erasure1.62-dev \ 51 | 
libboost-wave-dev libboost-wave1.62-dev \ 52 | libboost-dev libboost1.62-dev \ 53 | libboost-tools-dev libboost1.62-tools-dev \ 54 | libboost-all-dev \ 55 | libyaml-cpp-dev 56 | 57 | apt-get install -t stretch-backports \ 58 | libboost1.67-dev \ 59 | libboost1.67-all-dev \ 60 | libboost1.67-tools-dev \ 61 | libboost-atomic1.67-dev libboost-atomic1.67.0 \ 62 | libboost-chrono1.67-dev libboost-chrono1.67.0 \ 63 | libboost-container1.67-dev libboost-container1.67.0 \ 64 | libboost-context1.67-dev libboost-context1.67.0 \ 65 | libboost-coroutine1.67-dev libboost-coroutine1.67.0 \ 66 | libboost-date-time1.67-dev libboost-date-time1.67.0 \ 67 | libboost-exception1.67-dev \ 68 | libboost-fiber1.67-dev libboost-fiber1.67.0 \ 69 | libboost-filesystem1.67-dev libboost-filesystem1.67.0 \ 70 | libboost-graph1.67-dev libboost-graph1.67.0 \ 71 | libboost-graph-parallel1.67-dev libboost-graph-parallel1.67.0 \ 72 | libboost-iostreams1.67-dev libboost-iostreams1.67.0 \ 73 | libboost-locale1.67-dev libboost-locale1.67.0 \ 74 | libboost-log1.67-dev libboost-log1.67.0 \ 75 | libboost-math1.67-dev libboost-math1.67.0 \ 76 | libboost-mpi1.67-dev libboost-mpi1.67.0 \ 77 | libboost-mpi-python1.67-dev libboost-mpi-python1.67.0 \ 78 | libboost-numpy1.67-dev libboost-numpy1.67.0 \ 79 | libboost-program-options1.67-dev libboost-program-options1.67.0 \ 80 | libboost-python1.67-dev libboost-python1.67.0 \ 81 | libboost-random1.67-dev libboost-random1.67.0 \ 82 | libboost-regex1.67-dev libboost-regex1.67.0 \ 83 | libboost-serialization1.67-dev libboost-serialization1.67.0 \ 84 | libboost-signals1.67-dev libboost-signals1.67.0 \ 85 | libboost-stacktrace1.67-dev libboost-stacktrace1.67.0 \ 86 | libboost-system1.67-dev libboost-system1.67.0 \ 87 | libboost-test1.67-dev libboost-test1.67.0 \ 88 | libboost-thread1.67-dev libboost-thread1.67.0 \ 89 | libboost-timer1.67-dev libboost-timer1.67.0 \ 90 | libboost-type-erasure1.67-dev libboost-type-erasure1.67.0 \ 91 | libboost-wave1.67-dev 
libboost-wave1.67.0 92 | 93 | dpkg -i boost-dev-dummy_1.0_all.deb 94 | 95 | apt-get install libyaml-cpp-dev 96 | -------------------------------------------------------------------------------- /tools/easydb-5-macros.inc: -------------------------------------------------------------------------------- 1 | 12 | 13 | RewriteMap easydb-5-dispatch-$uniqueid prg:$server_basedir/src/cgi-bin/dispatch_requests 14 | 15 | ScriptAlias /cgi-bin/ $server_basedir/src/cgi-bin/ 16 | 17 | DocumentRoot $webfrontend_basedir/build 18 | 19 | 20 | Options -Indexes 21 | Require all granted 22 | 23 | 24 | AliasMatch ^/imexporter/ $server_basedir/src/imexporter/imexporter-$uniqueid 25 | 26 | AliasMatch ^/api/(v1/|)l10n/static/(.*)$ $server_basedir/build/l10n/$2 27 | AliasMatch ^/api/(v1/|)xmlmapping/static/(.*)$ $server_basedir/build/xmlmapping/$2 28 | AliasMatch ^/api/(v1/|)xmlmapping/tags$ $server_basedir/build/xmlmapping/eas-tags.json 29 | AliasMatch ^/api/(v1/|)plugin/static/base/([^/]*)/(.*)$ \ 30 | $plugins_basedir/$2/build/webfrontend/$3 31 | AliasMatch ^/api/(v1/|)plugin/static/solution/([^/]*)/([^/]*)/(.*)$ \ 32 | $solution_basedir/$2/plugins/$3/build/webfrontend/$4 33 | 34 | # /api/eas/put to "upload" socket 35 | AliasMatch ^/api/(v1/|)eas/put$ $server_basedir/src/imexporter/upload-$uniqueid 36 | 37 | RewriteEngine on 38 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-f 39 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-d 40 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-l 41 | RewriteCond %{REQUEST_URI} !^/api 42 | RewriteCond %{REQUEST_URI} !^/cgi-bin 43 | RewriteCond %{REQUEST_URI} !^/docs 44 | RewriteCond %{REQUEST_URI} !^/cui 45 | RewriteCond %{REQUEST_URI} !^/eas 46 | RewriteCond %{REQUEST_URI} !^/imexporter 47 | RewriteCond %{REQUEST_URI} !^/exec-php 48 | RewriteCond %{REQUEST_URI} !^/test 49 | RewriteCond %{REQUEST_URI} !^/upload 50 | RewriteRule ^.*$ $webfrontend_basedir/build/index.html [QSA] 51 | 52 | FastCgiExternalServer 
$server_basedir/src/imexporter/frontend-s-$uniqueid \ 53 | -idle-timeout 3000 -socket $frontend_slow_socket 54 | FastCgiExternalServer $server_basedir/src/imexporter/frontend-m-$uniqueid \ 55 | -idle-timeout 3000 -socket $frontend_medium_socket 56 | FastCgiExternalServer $server_basedir/src/imexporter/frontend-f-$uniqueid \ 57 | -idle-timeout 3000 -socket $frontend_fast_socket 58 | FastCgiExternalServer $server_basedir/src/imexporter/imexporter-$uniqueid \ 59 | -idle-timeout 3000 -socket $imexporter_socket 60 | FastCgiExternalServer $server_basedir/src/imexporter/upload-$uniqueid \ 61 | -idle-timeout 3000 -socket $upload_socket 62 | 63 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)(l10n|plugin|xmlmapping)/static 64 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)xmlmapping/tags 65 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)eas/put 66 | RewriteRule ^/api/(v1/|)(.*)$ \ 67 | $server_basedir/src/imexporter/frontend-${easydb-5-dispatch-$uniqueid:$2:$frontend_medium_socket:$frontend_fast_socket}-$uniqueid [L,NS] 68 | 69 | AddCharset utf-8 .js 70 | AddCharset utf-8 .json 71 | AddCharset utf-8 .html 72 | 73 | 74 | SetOutputFilter DEFLATE 75 | SetEnvIfNoCase Request_URI "\.(?:gif|jpe?g|png)$" no-gzip 76 | 77 | 78 | 79 | Header set "Access-Control-Allow-Origin" "*" 80 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 81 | Header set "Cache-Control" "public,max-age=3600" 82 | 83 | 84 | 85 | Header set "Access-Control-Allow-Origin" "*" 86 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 87 | Header set "Cache-Control" "no-cache,private,must-revalidate" 88 | 89 | 90 | 91 | Header set "Access-Control-Allow-Origin" "*" 92 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 93 | Header set "Cache-Control" "no-cache,private,must-revalidate" 94 | 95 | 96 | Header set "Access-Control-Allow-Origin" "*" 97 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 98 | Header set "Cache-Control" 
"no-cache,private,must-revalidate" 99 | 100 | 101 | 102 | ErrorDocument 404 /cgi-bin/error-handler.sh 103 | ErrorDocument 500 /cgi-bin/error-handler.sh 104 | ErrorDocument 502 /cgi-bin/error-handler.sh 105 | 106 | 107 | 108 | AliasMatch ^/upload(.*) $directory$1 109 | 110 | DAV on 111 | Options -MultiViews 112 | 113 | ErrorDocument 404 "Not Found" 114 | ErrorDocument 500 "Internal Server Error" 115 | ErrorDocument 502 "Bad Gateway" 116 | 117 | 118 | 119 | # vim:set ft=apache: 120 | -------------------------------------------------------------------------------- /tools/easydb-5-macros.inc.proxy: -------------------------------------------------------------------------------- 1 | 12 | 13 | RewriteMap easydb-5-dispatch-$uniqueid prg:$server_basedir/src/cgi-bin/dispatch_requests 14 | 15 | ScriptAlias /cgi-bin/ $server_basedir/src/cgi-bin/ 16 | 17 | DocumentRoot $webfrontend_basedir/build 18 | 19 | #LogLevel debug rewrite:trace3 20 | 21 | 22 | Options -Indexes 23 | Require all granted 24 | 25 | 26 | AliasMatch ^/imexporter/ $server_basedir/src/imexporter/imexporter-$uniqueid 27 | 28 | AliasMatch ^/api/(v1/|)l10n/static/(.*)$ $server_basedir/build/l10n/$2 29 | AliasMatch ^/api/(v1/|)xmlmapping/static/(.*)$ $server_basedir/build/xmlmapping/$2 30 | AliasMatch ^/api/(v1/|)xmlmapping/tags$ $server_basedir/build/xmlmapping/eas-tags.json 31 | AliasMatch ^/api/(v1/|)plugin/static/base/([^/]*)/(.*)$ \ 32 | $plugins_basedir/$2/build/webfrontend/$3 33 | AliasMatch ^/api/(v1/|)plugin/static/solution/([^/]*)/([^/]*)/(.*)$ \ 34 | $solution_basedir/$2/plugins/$3/build/webfrontend/$4 35 | 36 | # /api/eas/put to "upload" socket 37 | # AliasMatch ^/api/(v1/|)eas/put$ $server_basedir/src/imexporter/upload-$uniqueid 38 | 39 | RewriteEngine on 40 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-f 41 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-d 42 | RewriteCond $webfrontend_basedir/build/%{REQUEST_URI} !-l 43 | RewriteCond %{REQUEST_URI} !^/api 44 | RewriteCond 
%{REQUEST_URI} !^/cgi-bin 45 | RewriteCond %{REQUEST_URI} !^/docs 46 | RewriteCond %{REQUEST_URI} !^/cui 47 | RewriteCond %{REQUEST_URI} !^/eas 48 | RewriteCond %{REQUEST_URI} !^/imexporter 49 | RewriteCond %{REQUEST_URI} !^/exec-php 50 | RewriteCond %{REQUEST_URI} !^/test 51 | RewriteCond %{REQUEST_URI} !^/upload 52 | RewriteCond %{REQUEST_URI} !^/external 53 | RewriteRule ^.*$ $webfrontend_basedir/build/index.html [QSA] 54 | 55 | # FastCgiExternalServer $server_basedir/src/imexporter/frontend-s-$uniqueid \ 56 | # -idle-timeout 3000 -socket $frontend_slow_socket 57 | # FastCgiExternalServer $server_basedir/src/imexporter/frontend-m-$uniqueid \ 58 | # -idle-timeout 3000 -socket $frontend_medium_socket 59 | # FastCgiExternalServer $server_basedir/src/imexporter/frontend-f-$uniqueid \ 60 | # -idle-timeout 3000 -socket $frontend_fast_socket 61 | # FastCgiExternalServer $server_basedir/src/imexporter/imexporter-$uniqueid \ 62 | # -idle-timeout 3000 -socket $imexporter_socket 63 | # FastCgiExternalServer $server_basedir/src/imexporter/upload-$uniqueid \ 64 | # -idle-timeout 3000 -socket $upload_socket 65 | 66 | RewriteRule ^/api/(v1/|)eas/put$ \ 67 | /server-fcgi-upload-$uniqueid [PT,E=QUERY_STRING:/$1$2,E=PATH_INFO:/$1$2,E=SCRIPT_NAME:/api/$1$2] 68 | 69 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)(l10n|plugin|xmlmapping)/static 70 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)xmlmapping/tags 71 | RewriteCond %{REQUEST_URI} !^/api/(v1/|)eas/put 72 | RewriteRule ^/api/(v1/|)(.*)$ \ 73 | /server-fcgi-frontend-${easydb-5-dispatch-$uniqueid:$2:$frontend_medium_socket:$frontend_fast_socket}-$uniqueid/api/$1$2 [PT,E=QUERY_STRING:/$1$2,E=PATH_INFO:/$1$2,E=SCRIPT_NAME:/api/$1$2] 74 | 75 | ProxyFCGISetEnvIf "true" SCRIPT_NAME "/api%{env:PATH_INFO}" 76 | ProxyPass /server-fcgi-frontend-s-$uniqueid "unix:$frontend_slow_socket|fcgi://localhost/server/s-$identifier" 77 | ProxyPass /server-fcgi-frontend-m-$uniqueid "unix:$frontend_medium_socket|fcgi://localhost/server/m-$identifier" 78 | 
ProxyPass /server-fcgi-frontend-f-$uniqueid "unix:$frontend_fast_socket|fcgi://localhost/server/f-$identifier" 79 | ProxyPass /server-fcgi-imexporter-$uniqueid "unix:$imexporter_socket|fcgi://localhost/server/i-$identifier" 80 | ProxyPass /server-fcgi-upload-$uniqueid "unix:$upload_socket|fcgi://localhost/server/u-$identifier" 81 | 82 | AddCharset utf-8 .js 83 | AddCharset utf-8 .json 84 | AddCharset utf-8 .html 85 | 86 | 87 | SetOutputFilter DEFLATE 88 | SetEnvIfNoCase Request_URI "\.(?:gif|jpe?g|png)$" no-gzip 89 | 90 | 91 | 92 | Header set "Access-Control-Allow-Origin" "*" 93 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 94 | Header set "Cache-Control" "public,max-age=3600" 95 | 96 | 97 | 98 | Header set "Access-Control-Allow-Origin" "*" 99 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 100 | Header set "Cache-Control" "no-cache,private,must-revalidate" 101 | 102 | 103 | 104 | Header set "Access-Control-Allow-Origin" "*" 105 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 106 | Header set "Cache-Control" "no-cache,private,must-revalidate" 107 | 108 | 109 | Header set "Access-Control-Allow-Origin" "*" 110 | Header set "Access-Control-Allow-Headers" "Authorization, X-Easydb-Token" 111 | Header set "Cache-Control" "no-cache,private,must-revalidate" 112 | 113 | 114 | ErrorDocument 404 /cgi-bin/error-handler.sh 115 | ErrorDocument 500 /cgi-bin/error-handler.sh 116 | ErrorDocument 502 /cgi-bin/error-handler.sh 117 | 118 | 119 | 120 | AliasMatch ^/upload(.*) $directory$1 121 | 122 | DAV on 123 | Options -MultiViews 124 | 125 | ErrorDocument 404 "Not Found" 126 | ErrorDocument 500 "Internal Server Error" 127 | ErrorDocument 502 "Bad Gateway" 128 | 129 | 130 | 131 | # vim:set ft=apache: 132 | -------------------------------------------------------------------------------- /tools/merge_loca_csv.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/python
# coding=utf8

# Merge selected language columns from a second ("merge") l10n CSV file into
# a master l10n CSV file, writing the merged CSV to stdout.
# Keys present on only one side are reported on stderr without aborting.

import sys
import csv
import re
import argparse
import json

argparser = argparse.ArgumentParser()

argparser.add_argument('master', metavar='master',
                       type=str, help='master l10n csv file')
argparser.add_argument('merge', metavar='merge',
                       type=str, help='merge l10n csv file')

required = argparser.add_argument_group('required arguments')
required.add_argument('-k', '--key', type=str, required=True, metavar='key',
                      help='[Example: -k key] key column name for merging')
required.add_argument('-m', '--merge-columns', type=str, required=True, metavar='merge_columns',
                      help='[Example: -m=ru-RU,pl-PL] string with comma separated columns from merge csv that should be merged into master. must be in both master and merge csv files.')

argparser.add_argument('-f', '--set-key-fallback',
                       help='fallback column for missing entries, must be in master csv')
argparser.add_argument('-s', '--sort', action='store_true',
                       help='sort keys alphabetically in output csv')

# internal dict keys used for the parsed master rows
KEY_INTERNAL = '_key'
ROW_INTERNAL = '_row'


def to_stderr(line, _exit=True):
    # Print a message to stderr; exit with status 1 unless _exit is False.
    sys.stderr.write(str(line) + "\n")
    if _exit:
        sys.exit(1)


def sort_keys(x, y):
    # cmp-style comparator ordering parsed master rows by their l10n key.
    # Rows without a key compare equal to everything (stable sort keeps order).
    if KEY_INTERNAL not in x:
        return 0
    if KEY_INTERNAL not in y:
        return 0
    if x[KEY_INTERNAL] == y[KEY_INTERNAL]:
        return 0
    if x[KEY_INTERNAL] > y[KEY_INTERNAL]:
        return 1
    return -1


if __name__ == "__main__":

    args = argparser.parse_args()

    _merge_columns = set()
    for a in args.merge_columns.split(','):
        a = a.strip()
        if len(a) > 0:
            _merge_columns.add(a)

    _merge_columns = list(_merge_columns)

    if len(_merge_columns) < 1:
        to_stderr("'--merge-columns' must be list of valid column names")

    _master_csv_data = []
    _merge_csv_data = {}

    _master_columns = []
    _master_keys = set()
    _merge_keys = set()

    is_master_file = True
    for csvfilename in [args.master, args.merge]:

        # text mode ('r', not 'rb'): csv.DictReader needs str rows on Python 3
        with open(csvfilename, 'r') as csvfile:
            reader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
            if reader.fieldnames is None:
                to_stderr("no columns in csv file %s" % csvfilename)

            # check if the --key column is present in master and merge csv
            if not args.key in reader.fieldnames:
                to_stderr("key column '%s' is missing in %s" %
                          (args.key, csvfilename))

            # check if the --merge-columns are present in master and merge csv
            for column in _merge_columns:
                if not column in reader.fieldnames:
                    to_stderr("merge-column %s is not in csv file %s" %
                              (column, csvfilename))

            for fieldname in reader.fieldnames:
                if is_master_file:
                    _master_columns.append(fieldname)

            for row in reader:
                _row_data = []

                # rows shorter than the header yield None for missing cells;
                # guard before strip() to avoid an AttributeError
                _raw_key = row.get(args.key)
                if _raw_key is None:
                    continue
                _loca_key = _raw_key.strip()
                if _loca_key == '' and not is_master_file:
                    continue

                if is_master_file:
                    _master_keys.add(_loca_key)
                else:
                    _merge_keys.add(_loca_key)

                for fieldname in reader.fieldnames:
                    if is_master_file:
                        _row_data.append(row[fieldname])
                    elif fieldname in _merge_columns:
                        if _loca_key not in _merge_csv_data:
                            # only keep merge rows whose key exists in master
                            if _loca_key not in _master_keys:
                                continue
                            _merge_csv_data[_loca_key] = {}
                        _merge_csv_data[_loca_key][fieldname] = row[fieldname]

                if is_master_file:
                    _master_csv_data.append({
                        KEY_INTERNAL: _loca_key,
                        ROW_INTERNAL: _row_data
                    })

        is_master_file = False

    # report keys that exist on only one side (warnings only, no abort)
    for _loca_key in _merge_keys:
        if not _loca_key in _master_keys:
            to_stderr('missing in master: %s' % _loca_key, False)

    for _loca_key in _master_keys:
        if not _loca_key in _merge_keys:
            to_stderr('missing in merge: %s' % _loca_key, False)

    fallback_column = None
    if args.set_key_fallback is not None:
        if not args.set_key_fallback in _master_columns:
            to_stderr("fallback key column '%s' is missing in master csv %s" %
                      (args.set_key_fallback, args.master))
        else:
            # use 'set-key-fallback' column as fallback
            fallback_column = args.set_key_fallback

    # merge all key values from merge csv into master csv
    for d in _master_csv_data:
        if not KEY_INTERNAL in d or not ROW_INTERNAL in d:
            continue
        if len(d[KEY_INTERNAL]) < 1:
            continue

        _loca_key = d[KEY_INTERNAL]
        if not _loca_key in _merge_csv_data:
            continue

        _key_data = _merge_csv_data[_loca_key]

        for i in range(len(_master_columns)):
            if i >= len(d[ROW_INTERNAL]):
                continue
            _col = _master_columns[i]
            if not _col in _key_data:
                continue

            _merge_value = _key_data[_col]
            _master_value = d[ROW_INTERNAL][i]

            # value neither in master nor in merge -> key fallback
            if (_master_value is None or len(_master_value) < 1) and (_merge_value is None or len(_merge_value) < 1):
                if fallback_column == args.key:
                    # use key as fallback
                    d[ROW_INTERNAL][i] = _loca_key
                elif fallback_column is not None:
                    # use the value from the fallback column of the same master
                    # row (previously this re-assigned the already-empty master
                    # value, so the fallback column was never actually applied)
                    _fb_idx = _master_columns.index(fallback_column)
                    _fb_value = d[ROW_INTERNAL][_fb_idx] if _fb_idx < len(d[ROW_INTERNAL]) else None
                    d[ROW_INTERNAL][i] = _fb_value if _fb_value is not None else ''

            # value is in master and in merge -> merge value
            else:
                d[ROW_INTERNAL][i] = _merge_value

    if args.sort:
        # list.sort(cmp=...) does not exist on Python 3; adapt the
        # cmp-style comparator with functools.cmp_to_key
        from functools import cmp_to_key
        _master_csv_data.sort(key=cmp_to_key(sort_keys))

    writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
    writer.writerow(_master_columns)

    for row in _master_csv_data:
        if not ROW_INTERNAL in row:
            continue
        writer.writerow(row[ROW_INTERNAL])
--------------------------------------------------------------------------------
#!/bin/bash

### BEGIN INIT INFO
# Provides: easy5-instance
# Required-Start: $local_fs $network $time postgresql mountnfs elasticsearch
# Required-Stop: $local_fs $network $time postgresql mountnfs elasticsearch
# Should-Start: $syslog
# Should-Stop: $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: easydb5 instance
### END INIT INFO


# purpose of this script:
# * Start easydb5-server within a screen.
# * Change to unprivileged user $RUNAS to run easydb5-server and screen as. (sudo)
# * Function as an init script: arguments start and stop are enough to do the right thing.
# * Read instance name from $0 to require only a symlink per instance. (script file name after "easy5-")
# * Do not let screen (and the bash in the screen) die after easydb5-server ends.
# * Have the starting commandline as the most recent bash-history-line.

# usage:
# /etc/init.d/easy5-$MYNAME start
# /etc/init.d/easy5-$MYNAME stop
#
# /etc/init.d/easy5-$MYNAME status
# * show pstree of running screen/bash containing easydb5-server
# /etc/init.d/easy5-$MYNAME runs
# * only indicate by returnvalue (exitcode) whether easydb5-server is running
#
# ln -s /srv/git/checkouts/imexporter/src/tools/easy5-dev.init /etc/init.d/easy5-$INST
# * prepares this script for another instance $INST.
# * requires /srv/www/$INST/easydb-server/start.sh
# * $RUNAS and $STARTDIR are hardcoded, for init usage.
# * If you want different $RUNAS, $STARTDIR, ... then copy and change script instead.
#
# DEBUG=1 /etc/init.d/easy5-$MYNAME
# * show debug output
# DEBUG="" /etc/init.d/easy5-$MYNAME
# * no debug output (default)


# CONFIG ##################

MYNAME=`basename $0` # /etc/init.d/easy5-instancename -> easy5-instancename
MYNAME=${MYNAME#*-} # easy5-instancename -> instancename
STARTDIR=/srv/www/$MYNAME/easydb-server
SLEEP_RESTART=1 # wait how many seconds between stop and start, when doing restart
RUNAS=easydb
BASHRC=/dev/null
NAME_IN_PSTREE=es-manager
NAME_IN_PS=easydb-server

# PREPARATION ##################

# log that and when this is called as root
[ "$LOGNAME" = root ] && echo "$0 called with '$1' on `date` in runlevel `runlevel`" >> /var/log/custom-init

# helper functions:

debug(){ [ "$DEBUG" ] && echo "DEBUG: $@" >&2; } # output to stderr, only if $DEBUG is not empty

get_screen_pid(){ # sets $SCREENPID to the PID of one screen with matching name
	SCREENPS=`ps x|grep -i -- 'screen.*-S easy5-'$MYNAME |grep -v grep|tail -n 1`
	# word-splitting trick: the ps line is split into $1 $2 $3 ...; the PID is
	# the first field. The trailing "" guarantees `set` always has an argument.
	set $SCREENPS "" # strings are sorted into $1 $2 $3 ...
	SCREENPID=$1
	if [ "$SCREENPID" ] ; then
		return 0
	else
		return 1
	fi
}

# succeed iff a process with the given PID exists
is_pid_running() {
	ps --pid="$1" &>/dev/null
}

# change user:

debug "This script was called as user '$LOGNAME' to do '$1'"
if [ "$LOGNAME" = "$RUNAS" ] ; then
	echo -n # just fall through
elif [ "$LOGNAME" = root ] || [ "$LOGNAME" = jenkins ] ; then
	# grant $RUNAS access to the calling terminal so screen/debug output works
	PTSFILE=`ls /proc/$$/fd -l --color=no|sed -n 's#.*/dev/pts#/dev/pts#p'|head -n 1`
	if [ -e "$PTSFILE" ] ; then
		debug "changing permissions on $PTSFILE (`ls -l $PTSFILE`)"
		chmod a+rw "$PTSFILE"
	else
		#debug "no pts-file found: ls /proc/$$/fd -l --color=no : `ls /proc/$$/fd -l --color=no`"
		debug "no pts-file found in /proc/$$/fd"
	fi
	debug "changing from user '$LOGNAME' to '$RUNAS'"
	# re-exec this very script as $RUNAS; exec never returns on success
	exec sudo -u $RUNAS DEBUG=$DEBUG $0 $@
	debug "ERROR: This line should not be reached !"
	exit 1
else
	echo "ERROR: LOGNAME '$LOGNAME' does neither match RUNAS '$RUNAS' nor is it 'root' or 'jenkins'. Aborting."
	exit 1
fi

# store the bash PID into a file:

eval PIDFILE=~$RUNAS/easy5-$MYNAME.bashpid
debug "PIDFILE=$PIDFILE"


# MAIN ##################

case "$1" in
start)
	debug "screens before start: `screen -ls`"

	echo -n "Starting $MYNAME ... " ; [ "$DEBUG" ] && echo

	# a leftover screen means "start" is effectively a restart: kill it first
	COUNT=0
	while get_screen_pid; do
		debug "screen still present. Interpreting start as restart. Killing screen $SCREENPID"
		kill $SCREENPID
		COUNT=$((COUNT+1))
		if [ $COUNT -gt 10 ] ; then echo "ABORTING. Tried 10 times to stop old screen."; exit 1; fi
	done

	cd $STARTDIR
	# start a screen to later access the output and input:
	screen -h 3000 -m -d -S easy5-$MYNAME
	screen -S easy5-$MYNAME -X -p 0 stuff $'echo $$>'$PIDFILE'\n'
	screen -S easy5-$MYNAME -X -p 0 stuff $'./start.sh\n'
	# -m -d starts the screen detached from the beginning
	# -S sets a custom "name" to a screen process
	# To prevent screen from exiting, no command is given when creating the screen.
	# Instead commands are then later stuffed into the input buffer of screen with -X -p0 stuff $''
	# -p 0 just selects the first "window" of the screen.
	# -h 1234 sets the number of buffered lines to scroll around in with Ctrl-A Escape.

	echo . # init-script etiquette.

	# being verbose and helpful:
	# NOTE(review): $PID is never read from $PIDFILE in this branch, so the
	# check below always takes the else-path; consider PID=`<$PIDFILE` after
	# giving the stuffed 'echo $$>' command time to run — TODO confirm intent.
	if [ "$PID" ] && is_pid_running "$PID"; then
		which pstree >/dev/null 2>&1 && debug "running: `pstree -pnsuUa $PID`"
	else
		debug "bash not running: $PID (from PIDFILE $PIDFILE)"
	fi
	debug "screens after start: `screen -ls`"
	;;
"")
	echo $0 '[start|stop|status|restart]'
	;;
stop)
	debug "screens before stop: `screen -ls`"

	echo -n "Stopping $MYNAME ... " ; [ "$DEBUG" ] && echo
	#screen -S $MYNAME -X quit # does not work if there is more than one screen matching the name

	COUNT=0
	while get_screen_pid; do
		debug "Killing screen $SCREENPID"
		kill $SCREENPID
		COUNT=$((COUNT+1))
		if [ $COUNT -gt 10 ] ; then echo "ABORTING. Tried 10 times to stop old screen."; exit 1; fi
	done
	echo . # init-script etiquette.

	# cleanup:
	if [ -e "$PIDFILE" ] ; then
		PID=`<$PIDFILE` # needed for output below
		rm ${PIDFILE}.stopped >/dev/null 2>&1
		mv $PIDFILE ${PIDFILE}.stopped
	fi

	# being verbose and helpful:
	if [ "$PID" ] && is_pid_running "$PID" ; then
		which pstree >/dev/null 2>&1 && debug "Processes under bash $PID after stop: `pstree -pnsuUa $PID`"
	else
		debug "No process with PID '$PID' running. (PID was bash's ID according to PIDFILE '$PIDFILE')"
	fi
	debug "Screens after stop: `screen -ls`"
	;;
restart)
	$0 stop
	echo Sleeping $SLEEP_RESTART seconds to allow $0 to clean up.
	sleep $SLEEP_RESTART
	$0 start
	;;
runs)
	# exit codes: 0 = running, 64 = no pidfile, 32 = bash dead, 3 = bash alive but no server under it
	[ -e "$PIDFILE" ] || exit 64 # PID of bash not known due to missing PID file $PIDFILE. Unable to check for running $NAME_IN_PSTREE of ${MYNAME}-instance.
	PID=`<$PIDFILE`
	is_pid_running $PID || exit 32

	pstree -pnuUa $PID|grep -q $NAME_IN_PSTREE && exit 0

	debug "No $NAME_IN_PSTREE found under bash $PID."
	exit 3
	;;
status)
	# exit code is a bitmask: +8 no server processes, +16 no screen,
	# +32 bash from pidfile dead, +64 pidfile missing
	EXIT=0
	### server processes
	NUMRUN=`ps x|grep $NAME_IN_PS|grep -v grep|wc -l` # easydb-server processes running as $LOGNAME
	if [ $NUMRUN -eq 0 ] ; then
		echo "No $NAME_IN_PS running for user $RUNAS at all."
		EXIT=$((EXIT+8))
	else
		debug "$NUMRUN processes of $NAME_IN_PS running for user $RUNAS. (for whatever instances)"
	fi

	### screen
	if get_screen_pid && is_pid_running $SCREENPID ; then
		echo "$MYNAME: Processes in screen:"
		pstree -pnuUa $SCREENPID || debug "Warning: No success showing pstree for screen pid '$SCREENPID'."
	else
		echo "$MYNAME: No screen running."
		EXIT=$((EXIT+16))
	fi

	### server processes under the bash
	if [ -e "$PIDFILE" ] ; then
		PID=`<$PIDFILE`
		if is_pid_running $PID ; then
			if get_screen_pid && is_pid_running $SCREENPID ; then
				debug "$MYNAME: Already showed pstree under screen, not showing again for bash."
			else
				echo "$MYNAME Processes under bash $PID (taken from PIDFILE '$PIDFILE') :"
				pstree -pnuUa $PID || debug "Warning: No success showing pstree for bash pid '$PID'."
			fi
		else
			echo "$MYNAME: bash $PID is not running. (taken from PIDFILE '$PIDFILE')"
			EXIT=$((EXIT+32))
		fi
	else
		EXIT=$((EXIT+64))
		debug "PID of bash not known due to missing PID file $PIDFILE. Unable to check for running $NAME_IN_PSTREE of ${MYNAME}-instance."
	fi

	exit $EXIT
	;;
*)
	echo $0 '[start|stop|restart|status]'
esac
--------------------------------------------------------------------------------
/src/commons.coffee:
--------------------------------------------------------------------------------
# these common classes + methods are used by the plugins, developed by the vzg: gnd, geonames, gn250, gvk, georef, dante, getty, tna_discovery

class CustomDataTypeWithCommons extends CustomDataType

  # init data: ensure the field's slot in `data` exists and has a url key
  initData: (data) ->
    if not data[@name()]
      cdata = {}
      data[@name()] = cdata
    else
      cdata = data[@name()]

    if not cdata.url
      cdata.url = ""

    cdata

  renderFieldAsGroup: ->
    return false

  renderRowAsBlock: (data, top_level_data, opts) ->
    false

  # returns a map for search tokens, containing name and value strings.
  getQueryFieldBadge: (data) =>
    if data["#{@name()}:unset"]
      value = $$("text.column.badge.without")
    else
      value = data[@name()]

    name: @nameLocalized()
    value: value


  supportsStandard: ->
    true


  supportsFacet: ->
    true


  getFacet: (opts) ->
    opts.field = @
    new CustomDataTypeCommonFacet(opts)


  # provide a sort function to sort your data
  # (missing conceptName sorts last via the 'zzz' sentinel)
  getSortFunction: ->
    (a, b) =>
      CUI.util.compareIndex(a[@name()]?.conceptName or 'zzz', b[@name()]?.conceptName or 'zzz')

  # Enable sort
  hasRenderForSort: ->
    return true

  # extra sort targets offered in the UI
  sortExtraOpts: ->
    return [
      {
        text: "conceptName"
        value: "conceptName"
      }
      {
        text: "conceptURI"
        value: "conceptURI"
      }
    ]

  # returns markup to display in expert search
  renderSearchInput: (data, opts={}) ->
    search_token = new SearchToken
      column: @
      data: data
      fields: opts.fields
    .getInput().DOM


  getFieldNamesForSearch: ->
    @getFieldNames()


  getFieldNamesForSuggest: ->
    @getFieldNames()


  # fully-qualified field names this type exposes to search/suggest
  getFieldNames: ->

    field_names = [
      @fullName()+".conceptURI"
      @fullName()+".conceptName"
    ]

    field_names


  # returns a search filter suitable to the search array part
  # of the request, the data to be searched is in data[key] plus
  # possible additions, which should be stored in key+":"

  getSearchFilter: (data, key=@name()) ->

    # "unset" search: match records where conceptName is null
    if data[key+":unset"]
      filter =
        type: "in"
        fields: [ @fullName()+".conceptName" ]
        in: [ null ]
      filter._unnest = true
      filter._unset_filter = true
      return filter

    filter = super(data, key)
    if filter
      return filter

    if CUI.util.isEmpty(data[key])
      return

    val = data[key]
    [str, phrase] = Search.getPhrase(val)

    switch data[key+":type"]
      when "token", "fulltext", undefined
        filter =
          type: "match"
          mode: data[key+":mode"]
          fields: @getFieldNamesForSearch()
          string: str
          phrase: phrase

      when "field"
        filter =
          type: "in"
          fields: @getFieldNamesForSearch()
          in: [ str ]

    filter


  #######################################################################
  # handle editorinput
  renderEditorInput: (data, top_level_data, opts) ->

    name = @name()
    if not data[name]
      data[name] = {
        conceptName : ''
        conceptURI : ''
      }

    @__renderEditorInputPopover(data, data[name], opts)


  #######################################################################
  # buttons, which opens and closes popover+menu
  __renderEditorInputPopover: (data, cdata, opts={}) ->
    that = @
    # NOTE(review): bare expression below has no effect in CoffeeScript
    # (kept as-is; `layout` is assigned right after)
    layout

    # build layout for editor
    layout = new CUI.HorizontalLayout
      class: 'customPluginEditorLayout'
      center:
        class: ''
      right:
        content:
          new CUI.Buttonbar
            buttons: [
              new CUI.Button
                text: ''
                icon: new CUI.Icon(class: "fa-ellipsis-v")
                class: 'pluginDirectSelectEditSearch'
                # show "dots"-menu on click on 3 vertical dots
                onClick: (e, dotsButton) =>
                  dotsButtonMenu = new CUI.Menu
                    class: "customDataTypeCommonsMenu"
                    element : dotsButton
                  menu_items = [
                    #search
                    text: $$('custom.data.type.commons.controls.search.label')
                    value: 'search'
                    icon_left: new CUI.Icon(class: "fa-search")
                    onClick: (e2, btn2) ->
                      that.showEditPopover(dotsButton, data, cdata, layout, opts)
                      dotsButtonMenu.hide()
                  ]

                  # "detail info" entry only if the concrete plugin provides
                  # an extra tooltip hook
                  if typeof that.__getAdditionalTooltipInfo == "function"
                    detailinfo =
                      #detailinfo
                      text: $$('custom.data.type.commons.controls.detailinfo.label')
                      value: 'detail'
                      icon_left: new CUI.Icon(class: "fa-info-circle")
                      disabled: that.isEmpty(data, 0, 0)
                      onClick: (eDetailInfo, btnDetailInfo) ->
                        tooltip = new CUI.Tooltip
                          element: btnDetailInfo
                          placement: 'w'
                          markdown: true
                          show_ms: 1000
                          hide_ms: 200
                          content: (tooltip) ->
                            if !that.isEmpty(data, 0, 0)
                              # get jskos-details-data
                              encodedURI = encodeURIComponent(cdata.conceptURI)
                              extendedInfo_xhr = { "xhr" : undefined }
                              that.__getAdditionalTooltipInfo(encodedURI, tooltip, extendedInfo_xhr)
                            # loader, until details are xhred
                            new CUI.Label(icon: "spinner", text: $$('custom.data.type.commons.modal.form.popup.loadingstring'))
                        tooltip.show()
                        # hide tooltip + menu on mouseaction
                        CUI.Events.listen
                          type: ["click", "dblclick", "mouseout"]
                          capture: true
                          node: btnDetailInfo
                          only_once: true
                          call: (ev) =>
                            dotsButtonMenu.hide()

                    menu_items.push detailinfo
                  uriCall =
                    # call uri
                    text: $$('custom.data.type.commons.controls.calluri.label')
                    value: 'uri'
                    icon_left: new CUI.Icon(class: "fa-external-link")
                    disabled: that.isEmpty(data, 0, 0) || ! CUI.parseLocation(cdata.conceptURI)
                    onClick: ->
                      window.open cdata.conceptURI, "_blank"
                      dotsButtonMenu.hide()
                  menu_items.push uriCall
                  deleteClear =
                    #delete / clear
                    text: $$('custom.data.type.commons.controls.delete.label')
                    value: 'delete'
                    name: 'deleteValueFromDANTEPlugin'
                    class: 'deleteValueFromDANTEPlugin'
                    icon_left: new CUI.Icon(class: "fa-trash")
                    disabled: that.isEmpty(data, 0, 0)
                    onClick: ->
                      # reset the field to an empty concept and re-render
                      cdata = {
                        conceptName : ''
                        conceptURI : ''
                      }
                      data[that.name()] = cdata
                      that.__updateResult(cdata, layout, opts)
                      dotsButtonMenu.hide()
                  menu_items.push deleteClear
                  itemList =
                    items: menu_items
                  dotsButtonMenu._auto_close_after_click = false
                  dotsButtonMenu.setItemList(itemList)
                  dotsButtonMenu.show()
            ]

    # other plugins can trigger layout-rebuild by deletion of data-value
    CUI.Events.registerEvent
      type: "custom-deleteDataFromPlugin"
      bubble: false

    CUI.Events.listen
      type: "custom-deleteDataFromPlugin"
      instance: that
      node: layout
      call: =>
        cdata = {}
        data[that.name()] = cdata
        opts.deleteDataFromPlugin = true
        that.__updateResult(cdata, layout, opts)
    @__updateResult(cdata, layout, opts)
    layout


  #######################################################################
  # show popover and fill it with the form-elements
  showEditPopover: (btn, data, cdata, layout, opts) ->
    that = @

    # NOTE(review): bare expression below has no effect in CoffeeScript
    # (kept as-is; `suggest_Menu` is assigned further down)
    suggest_Menu

    # init xhr-object to abort running xhrs
    searchsuggest_xhr = { "xhr" : undefined }

    # set default value for count of suggestions
    cdata.countOfSuggestions = 50
    cdata_form = new CUI.Form
      class: 'cdtFormWithPadding'
      data: cdata
      fields: that.__getEditorFields(cdata)
      onDataChanged: (data, elem) =>
        @__updateResult(cdata, layout, opts)
        @__setEditorFieldStatus(cdata, layout)
        @__updateSuggestionsMenu(cdata, cdata_form, data.searchbarInput, elem, suggest_Menu, searchsuggest_xhr, layout, opts)
    .start()

    # init suggestmenu
    suggest_Menu = new CUI.Menu
      element: cdata_form.getFieldsByName("searchbarInput")[0]
      use_element_width_as_min_width: true
      class: "customDataTypeCommonsMenu"

    @popover = new CUI.Popover
      element: btn
      placement: "wn"
      class: "commonPlugin_Popover"
      pane:
        # title of popovers
        header_left: new CUI.Label(text: $$('custom.data.type.commons.popover.choose.label'))
        content: cdata_form
    .show()


  ########################################################################
  # check if field is empty
  isEmpty: (data, top_level_data, opts={}) ->
    if opts?.mode == "expert"
      # check input in expert search
      if typeof data[@name()] == 'object'
        return CUI.util.isEmpty(data[@name()]?)
      else
        return CUI.util.isEmpty(data[@name()]?.trim())

    return not data[@name()]?.conceptName

  #######################################################################
  # is called, when record is being saved by user
  getSaveData: (data, save_data, opts) ->

    that = @

    if opts.demo_data
      return {
        conceptName : 'Example'
        conceptURI : 'https://example.com'
      }

    cdata = data[@name()] or data._template?[@name()]
    switch @getDataStatus(cdata)
      when "invalid"
        if opts.copy
          save_data[@name()] = null
        else
          throw new InvalidSaveDataException()

      when "empty"
        save_data[@name()] = null

      when "ok"

        # if _fulltext is already set, leave it, else set conceptName
        conceptFulltext = {}
        if cdata?._fulltext
          if cdata._fulltext?.string
            if cdata._fulltext?.string != ''
              conceptFulltext.string = cdata._fulltext.string
          if cdata._fulltext?.l10ntext
            if cdata._fulltext.l10ntext
              conceptFulltext.l10ntext = cdata._fulltext.l10ntext
          if cdata._fulltext?.text
            if cdata._fulltext.text
              conceptFulltext.text = cdata._fulltext.text
        else
          conceptFulltext.text = cdata.conceptName.trim()

        # if _standard is already set, leave it
        conceptStandard = {}
        if cdata?._standard
          if cdata._standard?.l10ntext
            if cdata._standard.l10ntext
              conceptStandard.l10ntext = cdata._standard.l10ntext
          else
            # only set .text, if l10ntext is not set (else easydb-error!)
            if cdata._standard?.text
              if cdata._standard?.text != ''
                conceptStandard.text = cdata._standard.text
        #else
        #  conceptStandard.text = cdata.conceptName.trim() # (else easydb-error!)


        # save the frontend-language (display-purposes)
        frontendLanguages = ez5.loca.getLanguage()
        frontendLanguages = frontendLanguages.split('-')
        frontendLanguage = frontendLanguages[0]

        # save the eventually manually chosen label
        conceptNameChosenByHand = false
        if cdata?.conceptNameChosenByHand
          if cdata.conceptNameChosenByHand == true
            conceptNameChosenByHand = true

        # build savedata
        save_data[@name()] =
          conceptName: cdata.conceptName.trim()
          conceptURI: cdata.conceptURI.trim()
          frontendLanguage: frontendLanguage
          _fulltext: conceptFulltext
          _standard: conceptStandard

        # hierarchical ancestors
        if cdata?.conceptAncestors
          if cdata.conceptAncestors.length > 0
            save_data[@name()]['conceptAncestors'] = cdata.conceptAncestors

        if cdata?.conceptNameChosenByHand
          if cdata.conceptNameChosenByHand == true
            save_data[@name()]['conceptNameChosenByHand'] = true

  #######################################################################
  # update result in Masterform
  __updateResult: (cdata, layout, opts) ->
    that = @
    if opts.data
      opts.data[that.name(opts)] = cdata
    # if field is not empty
    if cdata?.conceptURI
      # shorten the uuid for display..
      displayURI = cdata.conceptURI
      if displayURI.length > 20
        displayURI = displayURI.replace('http://', '')
        displayURI = displayURI.replace('https://', '')
        if displayURI.length > 30
          displayURI = displayURI.replace('uri.gbv.de/terminology/', '…/')


      info = new CUI.VerticalLayout
        class: 'ez5-info_commonPlugin'
        top:
          content:
            new CUI.Label
              text: cdata.conceptName
              multiline: true
        bottom:
          content:
            new CUI.Button
              name: "outputButtonHref"
              appearance: "flat"
              size: "normal"
              text: displayURI
              tooltip:
                markdown: true
                placement: 'w'
                content: (tooltip) ->
                  # get jskos-details-data
                  encodedURI = encodeURIComponent(cdata.conceptURI)
                  extendedInfo_xhr = { "xhr" : undefined }
                  if typeof that.__getAdditionalTooltipInfo == "function"
                    that.__getAdditionalTooltipInfo(encodedURI, tooltip, extendedInfo_xhr)
                  # loader, until details are xhred
                  new CUI.Label(icon: "spinner", text: $$('custom.data.type.commons.modal.form.popup.loadingstring'))
              onClick: (evt,button) =>
                window.open cdata.conceptURI, "_blank"

      layout.replace(info, 'center')
      layout.addClass('ez5-linked-object-edit')
      options =
        class: 'ez5-linked-object-container'
      layout.__initPane(options, 'center')

    # if field is empty, display searchfield
    if ! cdata?.conceptURI
      # NOTE(review): bare expression below has no effect in CoffeeScript
      # (kept as-is; `suggest_Menu_directInput` is assigned further down)
      suggest_Menu_directInput

      inputX = new CUI.Input
        class: "pluginDirectSelectEditInput"
        undo_and_changed_support: false
        name: "directSelectInput"
        content_size: false
        onKeyup: (input) =>
          # do suggest request and show suggestions
          searchstring = input.getValueForInput()
          if typeof that.__updateSuggestionsMenu == "function"
            @__updateSuggestionsMenu(cdata, 0, searchstring, input, suggest_Menu_directInput, searchsuggest_xhr, layout, opts)
      # if not called from poolmanagerplugin
      if ! opts?.callfrompoolmanager
        inputX.render()

      # init suggestmenu
      suggest_Menu_directInput = new CUI.Menu
        element : inputX
        use_element_width_as_min_width: true
        class: "customDataTypeCommonsMenu"

      # init xhr-object to abort running xhrs
      searchsuggest_xhr = { "xhr" : undefined }

      layout.replace(inputX, 'center')
      layout.removeClass('ez5-linked-object-edit')
      options =
        class: ''
      layout.__initPane(options, 'center')

      # if deleted from another plugin, do NOT trigger, because that could lead in an endless loop
      if ! opts.deleteDataFromPlugin
        # did data change?
493 | that.__setEditorFieldStatus(cdata, layout) 494 | 495 | 496 | ####################################################################### 497 | # if something in form is in/valid, set this status to masterform 498 | __setEditorFieldStatus: (cdata, element) -> 499 | switch @getDataStatus(cdata) 500 | when "invalid" 501 | element.addClass("cui-input-invalid") 502 | else 503 | element.removeClass("cui-input-invalid") 504 | 505 | CUI.Events.trigger 506 | node: element 507 | type: "editor-changed" 508 | 509 | CUI.Events.trigger 510 | node: element 511 | type: "data-changed" 512 | 513 | @ 514 | 515 | ####################################################################### 516 | # renders details-output of record 517 | renderDetailOutput: (data, top_level_data, opts) -> 518 | @__renderButtonByData(data[@name()]) 519 | 520 | 521 | ####################################################################### 522 | # checks the form and returns status 523 | getDataStatus: (cdata) -> 524 | if (cdata) 525 | if cdata.conceptURI and cdata.conceptName 526 | # check url for valididy 527 | uriCheck = CUI.parseLocation(cdata.conceptURI) 528 | 529 | nameCheck = if cdata.conceptName then cdata.conceptName.trim() else undefined 530 | 531 | if uriCheck and nameCheck 532 | return "ok" 533 | 534 | if cdata.conceptURI.trim() == '' and cdata.conceptName.trim() == '' 535 | return "empty" 536 | 537 | return "invalid" 538 | return "empty" 539 | 540 | 541 | ####################################################################### 542 | # zeige die gewählten Optionen im Datenmodell unter dem Button an 543 | #getCustomDataOptionsInDatamodelInfo: (custom_settings) -> 544 | # if Object.keys(custom_settings).length == 0 545 | # ['Ohne Optionen'] 546 | 547 | 548 | class CustomDataTypeCommonFacet extends FieldFacet 549 | 550 | initOpts: -> 551 | super() 552 | @addOpts 553 | field: 554 | mandatory: true 555 | check: Field 556 | 557 | requestFacetWithLimit: (obj) -> 558 | limit: @getLimit() 559 | field: 
@_field.fullName()+".conceptName" 560 | sort: "count" 561 | type: "term" 562 | 563 | getObjects: (key=@name(), data=@data()) -> 564 | data[key]?.terms or [] 565 | 566 | renderObjectText: (object) -> 567 | object.term 568 | 569 | getObjectPath: (obj) -> 570 | [obj.term] 571 | 572 | name: -> 573 | @_field.fullName()+".conceptName" 574 | 575 | requestSearchFilter: (obj) -> 576 | bool: "must" 577 | fields: [ @_field.fullName()+".conceptName" ] 578 | type: "in" 579 | in: [ obj.term ] 580 | --------------------------------------------------------------------------------