├── .gitignore ├── LICENSE ├── README.md ├── images └── knowledgemap.jpg ├── 图谱构建脚本 └── 本地_爬虫+模板 │ ├── KG_introduce.py │ ├── Solid_alignment.py │ ├── __pycache__ │ ├── min_edit.cpython-37.pyc │ └── translate.cpython-37.pyc │ ├── agreement.py │ ├── change_node_param.py │ ├── change_node_param2.py │ ├── comd_detail2neo.py │ ├── entity_data.py │ ├── entity_solid.py │ ├── file │ ├── Languages.txt │ ├── Platforms.txt │ ├── StuQ技术图谱.txt │ ├── Technologies.txt │ ├── Tools.txt │ ├── abc.txt │ ├── abc_bak.txt │ ├── abc_bak2.txt │ ├── agreement.csv │ ├── com_model │ │ ├── command_model1_1.txt │ │ └── command_model2_1.txt │ ├── com_test │ │ ├── com_list.txt │ │ └── test.txt │ ├── comd_detail.txt │ ├── comd_no_detail.txt │ ├── commad_test.txt │ ├── command_model.txt │ ├── command_model2.txt │ ├── entity_align │ │ ├── tech_atr123 - 副本.txt │ │ ├── tech_atr123.txt │ │ ├── tech_atr_new.txt │ │ ├── tech_atr_格式化.txt │ │ ├── test_tech - 副本.txt │ │ ├── test_tech.txt │ │ ├── test_tech_relation _bak.txt │ │ └── test_tech_relation.txt │ ├── jieba_dic.txt │ ├── linux_serv_comd_dic.txt │ ├── linux_serv_file_dic.txt │ ├── my_service_test.txt │ ├── serv_detail.txt │ ├── serv_list.txt │ ├── service.txt │ ├── service_2.txt │ ├── service_3.txt │ ├── service_comd_file.txt │ ├── service_comd_test.txt │ ├── service_no_detail.txt │ ├── tech_comd_dic.txt │ ├── tech_comd_dic2.txt │ ├── tech_file_dic.txt │ ├── tech_server_dic.txt │ ├── technology.txt │ ├── technology2.txt │ ├── technology3.txt │ └── windows_serv.txt │ ├── file_count.py │ ├── get_neo4j_nodes.py │ ├── hadoop_linux_command.py │ ├── hadoop_linux_component.py │ ├── hadoop_linux_file.py │ ├── jieba_text.py │ ├── language2neo.py │ ├── linux_serv_comd.py │ ├── linux_serv_file.py │ ├── linux_serv_file2neo.py │ ├── linux_service_commad.py │ ├── min_edit.py │ ├── new_entity_ex.py │ ├── qulity_ex.py │ ├── readme.doc │ ├── readme.md │ ├── scrapy_test │ └── KG_test │ │ ├── KG_test │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── 
__init__.cpython-37.pyc │ │ │ └── settings.cpython-37.pyc │ │ ├── items.py │ │ ├── middlewares.py │ │ ├── pipelines.py │ │ ├── settings.py │ │ └── spiders │ │ │ ├── KG_spider.py │ │ │ ├── __init__.py │ │ │ └── __pycache__ │ │ │ ├── KG_spider.cpython-37.pyc │ │ │ └── __init__.cpython-37.pyc │ │ └── scrapy.cfg │ ├── serv_commd_predict.py │ ├── serv_detail2neo.py │ ├── service.py │ ├── service_comd_file.py │ ├── service_relation2neo.py │ ├── service_spider.py │ ├── sftp-config.json │ ├── snap_test.py │ ├── tech2neo.py │ ├── tech_atr.py │ ├── tech_comd_dic.py │ ├── tech_relation2neo4j.py │ ├── tech_similar_count.py │ ├── translate2.py │ ├── windows_serv2neo4j.py │ └── words_similar.py └── 基于图谱的应用 └── kgmaker ├── .gitattributes ├── .gitignore ├── knowledgegraph.sql ├── mvnw ├── mvnw.cmd ├── pom.xml ├── readme.md └── src ├── main ├── java │ └── com │ │ └── warmer │ │ └── kgmaker │ │ ├── KgmakerApplication.java │ │ ├── config │ │ └── WebAppConfig.java │ │ ├── controller │ │ ├── BaseController.java │ │ ├── FileController.java │ │ ├── KGManagerController.java │ │ ├── NLPController.java │ │ └── QuestionController.java │ │ ├── dal │ │ ├── IKGraphRepository.java │ │ ├── IKnowledgegraphRepository.java │ │ └── impl │ │ │ └── KGraphRepository.java │ │ ├── entity │ │ └── QAEntityItem.java │ │ ├── query │ │ └── GraphQuery.java │ │ ├── service │ │ ├── IKGGraphService.java │ │ ├── IKnowledgegraphService.java │ │ ├── IQuestionService.java │ │ └── impl │ │ │ ├── KGGraphService.java │ │ │ └── KnowledgegraphService.java │ │ └── util │ │ ├── CSVUtil.java │ │ ├── DateUtil.java │ │ ├── ExcelUtil.java │ │ ├── FileResponse.java │ │ ├── FileResult.java │ │ ├── GraphPageRecord.java │ │ ├── ImageUtil.java │ │ ├── Neo4jConfig.java │ │ ├── Neo4jUtil.java │ │ ├── QiniuUploadService.java │ │ ├── QiniuUtil.java │ │ ├── R.java │ │ ├── StringUtil.java │ │ ├── TestUtility.java │ │ ├── TextProcessUtility.java │ │ ├── UploadUtil.java │ │ └── UuidUtil.java └── resources │ ├── application.yml │ ├── 
hanlp.properties │ ├── mapping │ └── Knowledgegraph.xml │ ├── static │ ├── css │ │ ├── blog │ │ │ ├── base.css │ │ │ ├── index.css │ │ │ └── m.css │ │ ├── element-style.css │ │ ├── index.css │ │ ├── manager.css │ │ ├── style.css │ │ └── x-index.css │ ├── images │ │ ├── gzbg.jpg │ │ ├── logo │ │ │ ├── login_bg.png │ │ │ ├── logo-0.png │ │ │ ├── logo-3.jpg │ │ │ ├── logo.png │ │ │ └── logo_o.png │ │ └── tan_weixin_qr_1.jpg │ ├── js │ │ ├── d3.v4.min.js │ │ ├── html2canvas.min.js │ │ ├── iconfont.js │ │ ├── index.js │ │ ├── jquery.min.js │ │ ├── kgbuilder.js │ │ ├── knowledgegraphbuilder.js │ │ ├── knowledgegraphbuilder.js___jb_tmp___ │ │ ├── sidebarAdmin.js │ │ └── vue.js │ ├── scripts │ │ ├── codemirror-cypher.js │ │ ├── codemirror.js │ │ ├── cy2neod3.js │ │ ├── cypher.datatable.js │ │ ├── data.js │ │ ├── jquery.dataTables.min.js │ │ ├── neo4d3.js │ │ ├── neod3-visualization.js │ │ ├── neod3.js │ │ ├── sweet-alert.min.js │ │ └── vendor.js │ ├── styles │ │ ├── codemirror-neo.css │ │ ├── codemirror.css │ │ ├── cy2neo.css │ │ ├── datatable.css │ │ ├── fonts │ │ │ ├── FontAwesome.otf │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ └── fontawesome-webfont.woff │ │ ├── gh-fork-ribbon.css │ │ ├── images │ │ │ └── maze-black.png │ │ ├── neod3.css │ │ ├── sweet-alert.css │ │ └── vendor.css │ └── wangeditor │ │ ├── fonts │ │ └── w-e-icon.woff │ │ ├── wangEditor.css │ │ ├── wangEditor.js │ │ ├── wangEditor.min.css │ │ ├── wangEditor.min.js │ │ └── wangEditor.min.js.map │ └── templates │ ├── kg │ ├── demoforfont-end.html │ ├── home.html │ ├── index.html │ ├── popse.html │ └── test.html │ └── share │ ├── focus.html │ ├── footer.html │ ├── header.html │ ├── header.html___jb_tmp___ │ ├── headerAdmin.html │ ├── layout.html │ ├── layout.html___jb_tmp___ │ ├── layout3.html │ └── layoutAdmin.html └── test └── java └── com └── warmer └── kgmaker ├── HandLPTest.java └── KgmakerApplicationTests.java /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # 
Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AIOps-Knowledge-Graph-WebData 2 | *** 3 | - 项目分为图谱构建与图谱应用两个模块 4 | - 每个模块代码放在不同的文件夹中 5 | *** 6 | ## 图谱构建 7 | - 图谱构建采用一下流程构建,具体代码与解释说明在文件中 8 | ![](./images/knowledgemap.jpg) 9 | ## 知识介绍应用 10 | ### 运行与启动 11 | #### 安装jdk 12 | 可参考:[https://blog.csdn.net/qq_42003566/article/details/82629570](https://blog.csdn.net/qq_42003566/article/details/82629570) 13 | #### 安装Neo4j 14 | 可参考:[https://www.cnblogs.com/ljhdo/p/5521577.html](https://www.cnblogs.com/ljhdo/p/5521577.html),注意开放外网访问 0.0.0.0 15 | #### IDEA 导入项目 16 | 导入成功后对着项目根目录,右键->maven->reimport,等待其执行完成,倘若下载jar包太慢,自己配置外部maven仓库[https://blog.csdn.net/liu_shi_jun/article/details/78733633](https://blog.csdn.net/liu_shi_jun/article/details/78733633) 17 | 以上配置在linux下配置自行百度 18 | #### 找到目录 src/main/resources 19 | 修改application.yml,neo4配置url,password,改成自己的,mysql这里没用到,可以不改 20 | #### 打包发布 21 | 在idea 右侧 有 maven project 工具栏,点击展开lifecycle-clean,然后install,等待完成后在控制台可以看见打包的目录,例如:[INFO] Installing F:\git\Neo4j\kgmaker\target\kgmaker-0.0.1-SNAPSHOT.jar 复制jar包,去windows 或者linux下 切换到jar包目录执行 jar包 java -jar xxx.jar 即可启动,想部署到tomcat自行百度,springboot配置外部tomcat 22 | #### 访问路径 23 | 启动后访问[http://localhost](http://localhost) 24 | -------------------------------------------------------------------------------- /images/knowledgemap.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/images/knowledgemap.jpg -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/KG_introduce.py: 
# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/KG_introduce.py
# Purpose: crawl CSDN search results to collect "detail" sentences for
# knowledge-graph entities (commands / services) via sentence templates.
# ======================================================================
# encoding=utf8
# 这个脚本用来补全知识的介绍 (fills in entity descriptions)
from urllib import request
from urllib.request import build_opener, ProxyHandler
import requests
import random
import jieba
import re
import logging
import json
from bs4 import BeautifulSoup
from py2neo import Graph, Node, Relationship

# User-Agent pool; one is picked at random so the crawler looks less
# like a bot.
user_agent_list = [
    "Mozilla/5.0(Macintosh;IntelMacOSX10.6;rv:2.0.1)Gecko/20100101Firefox/4.0.1",
    "Mozilla/4.0(compatible;MSIE6.0;WindowsNT5.1)",
    "Opera/9.80(WindowsNT6.1;U;en)Presto/2.8.131Version/11.11",
    "Mozilla/5.0(Macintosh;IntelMacOSX10_7_0)AppleWebKit/535.11(KHTML,likeGecko)Chrome/17.0.963.56Safari/535.11",
    "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1)",
    "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1;Trident/4.0;SE2.XMetaSr1.0;SE2.XMetaSr1.0;.NETCLR2.0.50727;SE2.XMetaSr1.0)"
]

# Request headers with a randomly chosen User-Agent.
headers = {
    'User-Agent': random.choice(user_agent_list),
    'Connection': 'close'
}


def get_article_url2(url, serv, serv_dic):
    """Fetch one CSDN search-result page (JSON) and process every hit.

    url      -- search API url (already percent-encoded)
    serv     -- entity name being searched for
    serv_dic -- {entity: {sentence: count}} accumulator, mutated in place
    """
    req = requests.get(url, headers=headers)
    try:
        content = json.loads(req.text)
        for ele in content['result_vos']:
            order_detail(ele['url'], serv, serv_dic)
    finally:
        # BUG FIX: close the response even when JSON parsing or a
        # downstream request raises (the original leaked the connection).
        req.close()


def order_detail(url2, serv, serv_dic):
    """Download one article and run the template rule on each sentence."""
    req2 = requests.get(url2, headers=headers)
    soup2 = BeautifulSoup(req2.text, 'html.parser')
    for ele in soup2.select('article p'):
        txt = ele.get_text().strip().replace(" ", "").replace("\xa0", "").lower()
        # split the paragraph into sentences on 。 ! ?
        for ele2 in re.split('。|!|?', txt):
            # 模板1 for services is available but disabled:
            # server_detail_rule1(url2, ele2, serv, serv_dic)
            comd_detail_rule1(url2, ele2, serv, serv_dic)


def server_detail_rule1(url2, ele2, serv, serv_dic):
    """Template for services: keep sentences shaped '<serv>(服务)是...'."""
    if ele2 == (serv + "是什么") or ele2 == (serv + "服务是什么"):
        return  # the sentence is the question itself, not a definition
    # both original branches performed the same action, merged here
    if (serv + "服务是") in ele2 or (serv + "是") in ele2:
        print(ele2)
        add2dic(serv, serv_dic, ele2)


def comd_detail_rule1(url2, ele2, serv, serv_dic):
    """Template for commands: keep clauses starting with '<serv>命令是'."""
    if ele2 == (serv + "是什么") or ele2 == (serv + "命令是什么") or ele2 == (serv + "是什么?"):
        return  # question sentence, skip
    for every_one in re.split(',|,', ele2):
        if every_one.startswith(serv + "命令是"):
            print(ele2)
            add2dic(serv, serv_dic, ele2)


def add2dic(serv, tech_dic, commond):
    """Increment the occurrence count of sentence `commond` under `serv`."""
    tech_dic[serv][commond] = tech_dic[serv].get(commond, 0) + 1


logging.basicConfig(filename="./file/serv_detail.log", level=logging.CRITICAL)
logger = logging.getLogger()
# 字典: 服务与detail句子的次数关系, e.g. {"cron": {"crond...": 10}}
serv_dic = {}
i = 1  # page counter (only page 1 is crawled; kept for parity)
# BUG FIX: `with` guarantees the file closes on every path (the original
# try/finally held the handle open across all network calls on error).
with open("./file/comd_no_detail.txt", encoding='utf8') as tech_f:
    for ele in tech_f.read().split("\n"):
        ele = ele.lower()
        tech = ele.replace(" ", "%20")
        serv_dic[ele] = {}
        url2 = 'https://so.csdn.net/api/v2/search?q=' + tech + '%E5%91%BD%E4%BB%A4%E6%98%AF%E4%BB%80%E4%B9%88&t=blog&p=1&s=0&tm=0&lv=-1&ft=0&l=&u=&platform=pc'
        print(url2)
        get_article_url2(url2, ele, serv_dic)
        logger.critical(ele + ":::" + str(serv_dic[ele]))
        logger.critical("分界线")

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/Solid_alignment.py
# Purpose: entity alignment for 服务 nodes -- combines edit-distance name
# similarity with the overlap of their linked 命令 (command) neighbours.
# ======================================================================
# encoding=utf8
from py2neo import Graph, Node, Relationship
from min_edit import edit

graph = Graph("http://localhost:7474", auth=("neo4j", "SSPKUsspku12345"))

serv_node = graph.nodes.match("服务")
node_name = graph.run('MATCH (n:`服务`) RETURN n.name').data()


def my_union(label, name1, name2):
    """Return [shared, total]: how many `label` neighbours the two services
    share, and the summed sizes of both neighbour lists.

    NOTE(review): the Cypher query interpolates names directly; a name
    containing a double quote would break it -- acceptable for this
    offline script, but worth confirming against the node data.
    """
    comd1_name = graph.run('MATCH (n:`服务`)-[]-(m:`' + label + '`) where n.name="' + name1 + '" RETURN m.name').data()
    comd2_name = graph.run('MATCH (n:`服务`)-[]-(m:`' + label + '`) where n.name="' + name2 + '" RETURN m.name').data()
    shared = sum(1 for d1 in comd1_name for d2 in comd2_name
                 if d1['m.name'] == d2['m.name'])
    return [shared, len(comd1_name) + len(comd2_name)]


# BUG FIX: the original tested `node_name[i] == "服务"`, comparing a result
# *dict* ({'n.name': ...}) against a string -- always False, so the root
# node was never skipped -- and started the outer loop at index 1, silently
# omitting the first node from all pairings.  Compare the 'n.name' field
# and start from 0.
for i in range(len(node_name)):
    if node_name[i]['n.name'] == "服务":
        continue
    for j in range(i + 1, len(node_name)):
        if node_name[j]['n.name'] == "服务":
            continue
        name1 = node_name[i]['n.name']
        name2 = node_name[j]['n.name']
        sum_1 = len(name1) + len(name2)
        # normalised edit-distance similarity of the two names
        abc = (sum_1 - edit(name1, name2)) / sum_1
        mylist = my_union('命令', name1, name2)
        if mylist[1] == 0:
            continue  # neither service has any command neighbours
        # score = 0.5 * name similarity + 0.5 * neighbour overlap
        if abc * 0.5 + 0.5 * mylist[0] / mylist[1] > 0.7:
            print(name1 + ":::" + name2 + ":::" + str(abc))

# (The dump also listed binary artifacts __pycache__/min_edit.cpython-37.pyc
#  and __pycache__/translate.cpython-37.pyc; binary, not reproduced here.)
# (raw-link body for 图谱构建脚本/本地_爬虫+模板/__pycache__/translate.cpython-37.pyc
#  -- binary artifact from the dump, not reproduced)

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/agreement.py
# Purpose: 把协议实体写入neo4j -- load protocol entities from a CSV and
# link them to the 协议 root node and to the services that use them.
# CSV columns: name, detail, [service name that uses the protocol].
# ======================================================================
# encoding=utf8
import csv
from py2neo import Graph, Node, Relationship

graph = Graph("http://localhost:7474", auth=("neo4j", "12345"))
root_node = graph.nodes.match("协议", name="协议").first()
ser_node = graph.nodes.match("服务", name="服务").first()

# BUG FIX: the original stored csv.reader(open(...)) in `f` and called
# f.close() in a finally block -- csv.reader objects have no close()
# (AttributeError on exit) and the underlying file handle was leaked.
# A `with` block closes the real file object on every path.
with open('./file/agreement.csv', 'r') as agreement_file:
    for i in csv.reader(agreement_file):
        name = i[0]
        detail = i[1]
        node = Node("协议", name=name, detail=detail)
        ab = Relationship(root_node, "包含协议", node)
        graph.create(ab)
        # Optional third column: a service that uses this protocol.
        # BUG FIX: guard the row length -- short rows raised IndexError.
        if len(i) > 2 and i[2]:
            service = graph.nodes.match("服务", name=i[2]).first()
            if service:
                ab2 = Relationship(service, "服务使用协议", node)
            else:
                # unknown service: create it under the 服务 root first
                print(i[2])
                ser = Node("服务", name=i[2])
                ab3 = Relationship(ser_node, "服务使用协议", ser)
                graph.create(ab3)
                ab2 = Relationship(ser, "服务使用协议", node)
            graph.create(ab2)

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/change_node_param.py
# Purpose: 用于统一命令参数格式(linux) -- normalise the `parameter`
# property of linux 命令 nodes by replacing '/' with '|||'.
# ======================================================================
# encoding=utf8
from py2neo import Graph, Node, Relationship

graph = Graph("http://localhost:7474", auth=("neo4j", "12345"))

com_node = graph.nodes.match("命令", platform='linux')

for ele in com_node:
    parameter = ele['parameter']
    if parameter:
        # '/' separated alternative parameter spellings; unify to '|||'
        ele.update({'parameter': str(parameter).replace('/', '|||')})
        graph.push(ele)
# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/change_node_param2.py
# Purpose: 用于统一命令参数格式(hadoop) -- normalise the `parameter`
# property of hadoop 命令 nodes by replacing '| ' with '||| '.
# ======================================================================
# encoding=utf8
from py2neo import Graph, Node, Relationship

graph = Graph("http://localhost:7474", auth=("neo4j", "12345"))

com_node = graph.nodes.match("命令", platform='hadoop')

for ele in com_node:
    parameter = ele['parameter']
    if parameter:
        parameter2 = str(parameter).replace('| ', '||| ')
        print(parameter2)
        ele.update({'parameter': parameter2})
        graph.push(ele)

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/comd_detail2neo.py
# Purpose: 把命令的介绍录入neo4j -- read "name:::{sentence: count}" lines
# and store the joined sentences on each 命令 node's `detail` property.
# ======================================================================
# encoding=utf8
import ast
from py2neo import Graph, Node, Relationship

graph = Graph("http://localhost:7474", auth=("neo4j", "SSPKUsspku12345"))

with open("./file/comd_detail.txt", "r", encoding='utf8') as f:
    for evey_list in f.read().split('\n'):
        serv_list = evey_list.split(":::")
        # SAFETY: ast.literal_eval parses only Python literals, unlike the
        # original bare eval() on file content.
        name = serv_list[0]
        serv_dic = ast.literal_eval(serv_list[1])
        # join all candidate sentences with 。 (same result as the
        # original accumulate-with-separator loop)
        serv_str = "。".join(serv_dic.keys())
        print(serv_str)
        serv_node = graph.nodes.match("命令", name=name).first()
        if serv_node is None:
            # BUG FIX: the original raised TypeError when no node matched
            print("命令 node not found: " + name)
            continue
        serv_node['detail'] = serv_str
        graph.push(serv_node)

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/entity_data.py
# Purpose: 实体属性准备 -- read crawled attribute-frequency dicts (4 lines
# per entity: edition, author, web, filler) and insert the most frequent
# value of each attribute into the MySQL `entity` table.
# ======================================================================
# encoding=utf8
import ast
import json
import MySQLdb

db = MySQLdb.connect(host="localhost", user="root", passwd="12345",
                     db="staffjoy_account", charset='utf8')


def mysort(dic):
    """Return dic's (value, count) items sorted by count, highest first."""
    return sorted(dic.items(), key=lambda d: d[1], reverse=True)


cursor = None
try:
    with open("./file/entity_align/tech_atr_new.txt", encoding='utf8', mode='r') as file:
        content_list = file.read().split("\n")
    cursor = db.cursor()
    i = 0
    while i < len(content_list):
        edi = ""
        aut = ""
        w = ""
        # line i: {name: {edition: count}} -- the single key is the entity
        # name (SAFETY: literal_eval instead of the original eval)
        record = ast.literal_eval(content_list[i])
        name = list(record.keys())[0]
        edition = mysort(record[name])
        if edition:
            edi = edition[0][0]
        i += 1
        # line i+1: author frequencies for the same entity
        author = mysort(ast.literal_eval(content_list[i])[name])
        if author:
            aut = author[0][0]
        i += 1
        # line i+2: web-site frequencies; one extra line is then skipped
        web = mysort(ast.literal_eval(content_list[i])[name])
        if web:
            w = web[0][0]
        i += 2
        # BUG FIX: the original concatenated raw values into the SQL text,
        # which breaks on quotes and allows SQL injection -- use a
        # parameterised query (PEP 249 paramstyle 'format').
        cursor.execute(
            "Insert into entity (name,author,web,edition) values (%s,%s,%s,%s)",
            (name, aut, w, edi))
finally:
    db.commit()
    # BUG FIX: the original finally referenced `cursor` unconditionally;
    # a failure before db.cursor() raised NameError here.
    if cursor is not None:
        cursor.close()
    db.close()

# ======================================================================
# File (from repo dump): 图谱构建脚本/本地_爬虫+模板/entity_solid.py
# Purpose: entity alignment over the MySQL `entity` table -- average the
# Jaccard similarity of the (name, author, web, edition) columns for each
# pair of entities and print pairs whose mean similarity exceeds 0.6.
# ======================================================================
# encoding=utf8
import json
import MySQLdb
from min_edit import edit

db = MySQLdb.connect(host="localhost", user="root", passwd="12345",
                     db="staffjoy_account", charset='utf8')


def jaccard_sim(a, b):
    """Element-level Jaccard similarity |a∩b| / |a∪b|; 0 when both empty."""
    unions = len(set(a).union(set(b)))
    intersections = len(set(a).intersection(set(b)))
    return 0 if unions == 0 else 1 * intersections / unions


def mysort(entity1, entity2):
    """Print the pair when the mean Jaccard similarity of their four
    attribute columns exceeds 0.6.

    NOTE(review): assumes all four columns are non-NULL iterables
    (set(None) would raise) -- confirm against the table schema.
    """
    r = 0
    for idx in range(0, len(entity1[0])):
        r += jaccard_sim(entity1[0][idx], entity2[0][idx])
    if r / 4 > 0.6:
        print(entity1[0][0] + ":::" + entity2[0][0] + ":::" + str(r / 4))


cursor = None
try:
    cursor = db.cursor()
    cursor.execute("select name from entity")
    content_list = cursor.fetchall()
    for i in range(0, len(content_list)):
        # BUG FIX: parameterised queries replace the original
        # string-concatenated SQL (quote-breakage / injection).
        cursor.execute("select name,author,web,edition from entity where name=%s",
                       (content_list[i][0],))
        results1 = cursor.fetchall()
        for j in range(i + 1, len(content_list)):
            cursor.execute("select name,author,web,edition from entity where name=%s",
                           (content_list[j][0],))
            results2 = cursor.fetchall()
            mysort(results1, results2)
finally:
    if cursor is not None:
        cursor.close()
    db.close()

# --- 图谱构建脚本/本地_爬虫+模板/file/Languages.txt (data file; its list
#     begins here in the dump and continues on the following lines) ---
# C# 4.0
# Java language end of life
# Functional
languages 4 | Concurrent languages 5 | DSL’s 6 | Javascript as a firstclass language 7 | Ruby/Jruby 8 | Javascript as a fi rst-class language 9 | DSLs 10 | Groovy 11 | F# 12 | Clojure 13 | Scala 14 | HTML5 15 | HTML 5 16 | JRuby 17 | JavaScript as a first class language 18 | Ruby 19 | SASS, SCSS, and LESS 20 | HAML 21 | Javascript as a first class language 22 | Domain-specific languages 23 | Coffeescript 24 | Future of Java 25 | Logic in stored procedures 26 | CoffeeScript 27 | ClojureScript 28 | Functional Java 29 | Domain-Specific Languages 30 | Scratch, Alice, and Kodu 31 | Twitter Bootstrap 32 | Sinatra 33 | AngularJS and Knockout 34 | Require.js 35 | Dropwizard 36 | Jekyll 37 | HTML5 for offline applications 38 | Lua 39 | RubyMotion 40 | Gremlin 41 | JavaScript as a platform 42 | AngularJS 43 | Core Async 44 | HAL 45 | Hive 46 | Nancy 47 | Pester 48 | Play Framework 2 49 | Q & Bluebird 50 | R as Compute Platform 51 | Elm 52 | Julia 53 | Om 54 | Pointer Events 55 | Python 3 56 | Rust 57 | Spray/akka-http 58 | Spring Boot 59 | TypeScript 60 | Wolfram Language 61 | Dashing 62 | Django REST 63 | Ionic Framework 64 | Nashorn 65 | React.js 66 | Retrofit 67 | Ember.js 68 | Flight.js 69 | Haskell Hadoop library 70 | Lotus 71 | Reagent 72 | Swift 73 | Enlive 74 | SignalR 75 | Axon 76 | Frege 77 | HyperResource 78 | Material UI 79 | OkHttp 80 | React Native 81 | TLA+ 82 | Traveling Ruby 83 | Butterknife 84 | Dagger 85 | Dapper 86 | Fetch 87 | Redux 88 | Robolectric 89 | Alamofire 90 | Aurelia 91 | Cylon.js 92 | Elixir 93 | GraphQL 94 | Immutable.js 95 | Recharts 96 | JSPatch 97 | Enzyme 98 | Phoenix 99 | Quick and Nimble 100 | ECMAScript 2017 101 | JuMP 102 | Physical Web 103 | Rapidoid 104 | ReSwift 105 | Three.js 106 | Vue.js 107 | WebRTC 108 | AssertJ 109 | Avro 110 | CSS Grid Layout 111 | CSS Modules 112 | Jest 113 | Kotlin 114 | Spring Cloud 115 | Android Architecture Components 116 | ARKit/ARCore 117 | Atlas and BeeHive 118 | Caffe 119 | Clara rules 120 | 
CSS-in-JS 121 | Digdag 122 | Druid 123 | ECharts 124 | Gobot 125 | Instana 126 | Keras 127 | LeakCanary 128 | PostCSS 129 | PyTorch 130 | single-spa 131 | Solidity 132 | TensorFlow Mobile 133 | Truffle 134 | Weex 135 | Apollo 136 | Hyperledger Composer 137 | OpenZeppelin 138 | Flutter 139 | Hyperapp 140 | Rasa 141 | Reactor 142 | RIBs 143 | SwiftNIO 144 | Tensorflow Eager Execution 145 | TensorFlow Lite 146 | troposphere 147 | WebAssembly 148 | Apache Beam 149 | Formik 150 | HiveRunner 151 | joi 152 | Ktor 153 | Laconia 154 | Puppeteer 155 | Resilience4j 156 | Room 157 | WebFlux 158 | Aeron 159 | Arrow 160 | Chaos Toolkit 161 | Dask 162 | Embark 163 | fastai 164 | http4k 165 | Immer 166 | Karate 167 | Micronaut 168 | Next.js 169 | Pose 170 | react-testing-library 171 | ReasonML 172 | Taiko 173 | Vapor 174 | jest-when 175 | React Hooks 176 | React Testing Library 177 | Styled components 178 | Tensorflow 179 | Fairseq 180 | Flair 181 | Gatsby.js 182 | KotlinTest 183 | NestJS 184 | Paged.js 185 | Quarkus 186 | SwiftUI 187 | Testcontainers 188 | Exposed 189 | GraphQL Inspector 190 | Koin 191 | Sarama 192 | Clinic.js Bubbleprof 193 | Deequ 194 | ERNIE 195 | MediaPipe 196 | Tailwind CSS 197 | Tamer 198 | Wire 199 | XState 200 | JavaScript MV* frameworks 201 | Require.js & NPM 202 | OWIN 203 | Go language 204 | Reactive Extensions across languages 205 | Web API 206 | Yeoman 207 | Django Rest 208 | netty 209 | python 210 | C++ 211 | C 212 | php 213 | go 214 | ruby 215 | perl 216 | plsql 217 | scala 218 | earlang 219 | golang 220 | springmvc 221 | springboot 222 | springcloud 223 | dubbo 224 | .net 225 | AppleScript 226 | JAVA 227 | C/C++ 228 | VB 229 | Visual Basic .NET 230 | Visual C# 231 | ASP.NET 232 | J# 233 | Objective-C -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/Platforms.txt: -------------------------------------------------------------------------------- 1 | Android 2 | JVM as platform 3 | Firefox 4 | 
Cloud 5 | iPhone 6 | HTML 5 7 | Non-relational databases 8 | Rich Internet applications 9 | RDF & SPARQL 10 | Google as corporate platform 11 | Location based services 12 | Chrome OS 13 | Chrome 14 | IE8 15 | IE6 end of life 16 | Platform roadmaps 17 | ALT.NET 18 | android 19 | EC2 & S3 20 | Large format mobile devices 21 | Facebook as business platform 22 | Application appliances 23 | Google App Engine 24 | mobile web 25 | Rich Internet Applications 26 | Azure 27 | GWT 28 | WS-* beyond basic profi le 29 | Rich internet applications 30 | WS-* beyond basic profile 31 | Mobile Web 32 | GPGPU 33 | App containers 34 | OAuth 35 | RDFa 36 | iPad 37 | KVM 38 | Atom 39 | IE6 End of Life 40 | Heroku 41 | Node.js 42 | vFabric 43 | OpenStack 44 | RIA 45 | JRuby 46 | ATOM 47 | AWS 48 | Mobile web 49 | Tablet (formerly iPad) 50 | Offline mobile webapps (just html5) 51 | Ubiquitous computing 52 | Cloud Foundry 53 | WS-* 54 | Java portal servers 55 | Domain-specific PaaS 56 | Linux containers 57 | Offline mobile webapps (just HTML5) 58 | Private clouds 59 | Tablet 60 | Windows Phone 7 61 | AppHarbor 62 | Hybrid clouds 63 | OpenSocial 64 | Single threaded servers with asynchronous I/O 65 | Riak 66 | MongoDB 67 | Continuous integration in the cloud 68 | Couchbase 69 | Calatrava 70 | Datomic 71 | Vert.x 72 | Open source IaaS 73 | BigQuery 74 | Windows Phone 75 | iBeacon 76 | PostgreSQL for NoSQL 77 | Private Clouds 78 | ARM Server SoC 79 | CoAP 80 | DigitalOcean 81 | Espruino 82 | EventStore 83 | Low-cost robotics 84 | Mapbox 85 | OpenID Connect 86 | SPDY 87 | Storm 88 | TOTP Two-Factor Authentication 89 | Web Components standard 90 | Apache Spark 91 | Cloudera Impala 92 | Apache Kylin 93 | Apache Mesos 94 | CoreCLR and CoreFX 95 | CoreOS 96 | Deis 97 | H2O 98 | Jackrabbit Oak 99 | Linux security modules 100 | MariaDB 101 | Netflix OSS Full stack 102 | OpenAM 103 | SDN 104 | Spark Photon/Spark Electron 105 | Text it as a service / Rapidpro.io 106 | Time series databases 107 | U2F 
108 | AWS Lambda 109 | Fastly 110 | HSTS 111 | AWS ECS 112 | Ceph 113 | Kubernetes 114 | Mesosphere DCOS 115 | Microsoft Nano Server 116 | Particle Photon/Particle Electron 117 | Presto 118 | Rancher 119 | Pivotal Cloud Foundry 120 | Amazon API Gateway 121 | Bluetooth Mesh 122 | Deflect 123 | ESP8266 124 | MemSQL 125 | Nomad 126 | Realm 127 | Sandstorm 128 | TensorFlow 129 | Unity beyond gaming 130 | .NET Core 131 | Apache Flink 132 | AWS Application Load Balancer 133 | Cassandra carefully 134 | Electron 135 | Ethereum 136 | HoloLens 137 | IndiaStack 138 | Nuance Mix 139 | OpenVR 140 | Tarantool 141 | wit.ai 142 | Flood IO 143 | Google Cloud Platform 144 | Keycloak 145 | OpenTracing 146 | WeChat 147 | Azure Service Fabric 148 | Cloud Spanner 149 | Corda 150 | Cosmos DB 151 | DialogFlow 152 | GKE 153 | Hyperledger 154 | Kafka Streams 155 | Language Server Protocol 156 | LoRaWAN 157 | MapD 158 | Mosquitto 159 | Netlify 160 | PlatformIO 161 | TensorFlow Serving 162 | Voice platforms 163 | Windows Containers 164 | Contentful 165 | EMQ 166 | AWS Fargate 167 | Azure Stack 168 | Godot 169 | Interledger 170 | Mongoose OS 171 | TICK Stack 172 | Web Bluetooth 173 | EVM beyond Ethereum 174 | InfluxDB 175 | Istio 176 | CloudEvents 177 | Cloudflare Workers 178 | Deno 179 | Hot Chocolate 180 | Knative 181 | MinIO 182 | Prophet 183 | Quorum 184 | SPIFFE 185 | Tendermint 186 | TimescaleDB 187 | Apollo Auto 188 | GCP Pub/Sub 189 | ROS 190 | AWS Cloud Development Kit 191 | Azure DevOps 192 | Azure Pipelines 193 | Crowdin 194 | Crux 195 | Delta Lake 196 | Fission 197 | FoundationDB 198 | GraalVM 199 | Hydra 200 | Kuma 201 | MicroK8s 202 | Oculus Quest 203 | ONNX 204 | Rootless containers 205 | Snowflake 206 | Teleport 207 | Anka 208 | Argo CD 209 | eBPF 210 | Firebase 211 | OpenTelemetry 212 | Anthos 213 | Apache Pulsar 214 | Cosmos 215 | Google BigQuery ML 216 | JupyterLab 217 | Marquez 218 | Matomo 219 | MeiliSearch 220 | Stratos 221 | Trillian 222 | Hadoop 223 | Rackspace Cloud 
224 | PhoneGap/Apache Cordova 225 | Vumi 226 | Zepto.js 227 | Hadoop as a service 228 | Akka 229 | Backend as a service 230 | Mesos 231 | ARM Server Soc 232 | Rapidpro.io 233 | TOTP Two factor 234 | IOS -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/agreement.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/file/agreement.csv -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/com_model/command_model1_1.txt: -------------------------------------------------------------------------------- 1 | {'(([a-zA-Z])*?)是linux系统专有的命令': 1, '(([a-zA-Z])*?)这个命令': 1, '命令是 (([a-zA-Z])*?)': 1, '命令(([a-zA-Z])*?)': 2, '命令:(([a-zA-Z])*?)': 1, '(([a-zA-Z])*?)命令': 118, '(([a-zA-Z])*?)具体命令': 2, '命令功能: (([a-zA-Z])*?)': 2, '命令是显示(([a-zA-Z])*?)': 2, '(([a-zA-Z])*?)和chmod命令': 1, '命令功能: 通过(([a-zA-Z])*?)': 1, '(([a-zA-Z])*?)比较目录的结果我们可以结合grep命令': 3, '命令格式:(([a-zA-Z])*?)': 1, '(([a-zA-Z])*?):文件操作命令': 1} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/com_model/command_model2_1.txt: -------------------------------------------------------------------------------- 1 | {'中就可以使用(([a-zA-Z])*?)查看文件内容': 1, '到屏幕上是(([a-zA-Z])*?)的自定义目的': 1, ': tac是将(([a-zA-Z])*?)反写过来': 1, '标准输入结束: 2、(([a-zA-Z])*?)>filename': 1, '3、(([a-zA-Z])*?)>filename<': 1, 'sh的内容就是(([a-zA-Z])*?)生成的内容': 1, '通过查找(([a-zA-Z])*?)源代码': 1, '属性控制是由(([a-zA-Z])*?)来改变的': 4, '是否成功执行了(([a-zA-Z])*?)目录': 2, '来看看lsattr和(([a-zA-Z])*?)的man手册': 1, '一旦使用(([a-zA-Z])*?)成为只读文件': 1, '--quiet 当(([a-zA-Z])*?)不能改变文件': 1, '以上是(([a-zA-Z])*?)的用法': 1, '应该对于(([a-zA-Z])*?)的用法明白': 1, '用户可以使用(([a-zA-Z])*?)指令去变更': 1, '您可以使用(([a-zA-Z])*?)一次更改多': 1, '目录名称传递给(([a-zA-Z])*?)而不是文件名': 1, '以便(([a-zA-Z])*?)报告其工作': 1, '交由指令"(([a-zA-Z])*?)"进行校验': 1, 
'使用指令"(([a-zA-Z])*?)"计算文件': 1, '指定文件交由(([a-zA-Z])*?)演算': 2, '如果显示消息 (([a-zA-Z])*?): eof on yeqiongzhou1': 1, '2) (([a-zA-Z])*?): eof on file1 若': 1, '如果显示消息(([a-zA-Z])*?): eof on prog': 1, '让我们来看一下(([a-zA-Z])*?)是如何工作': 2, '要明白(([a-zA-Z])*?)比较结果的': 2, '黑色部分则是(([a-zA-Z])*?)的比较输出': 3, '如何读懂(([a-zA-Z])*?)': 1, '结果为 git 格式的(([a-zA-Z])*?)': 1, '使用(([a-zA-Z])*?)可以比较两个': 2, '的比较工具 (([a-zA-Z])*?)': 1, '我们使用(([a-zA-Z])*?)比较他们的': 1, '的结果直接送给(([a-zA-Z])*?)指令进行统计': 1, '然后使用(([a-zA-Z])*?)指令对结果': 1, '被修改了的(([a-zA-Z])*?)信息': 1, '的生成commit log以及(([a-zA-Z])*?)信息': 1, '看到commit log正确而(([a-zA-Z])*?)有问题的时候': 1, '结果就是pull request中(([a-zA-Z])*?)的信息就': 1, '这样调用(([a-zA-Z])*?)的时候可以': 1, '从而解决了(([a-zA-Z])*?)的问题': 1, '包含了错误的(([a-zA-Z])*?)信息发到': 1, '不会被这么一个(([a-zA-Z])*?)给吓到': 1, ' 补充说明:通过(([a-zA-Z])*?)指令': 1, '让(([a-zA-Z])*?)依序辨识这些': 1, '例子来介绍下(([a-zA-Z])*?)的常规用法': 1} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/com_test/com_list.txt: -------------------------------------------------------------------------------- 1 | cat 2 | chattr 3 | chmod 4 | chown 5 | cksum 6 | cmp 7 | diff 8 | diffstat 9 | file 10 | find 11 | git 12 | gitview 13 | indent 14 | cut 15 | ln 16 | less 17 | locate 18 | lsattr 19 | mattrib 20 | mc 21 | mdel 22 | mdir 23 | mktemp 24 | more 25 | mmove 26 | mread 27 | mren 28 | mtools 29 | mtoolstest 30 | mv 31 | od 32 | paste 33 | patch 34 | rcp 35 | rm 36 | slocate 37 | split 38 | tee 39 | tmpwatch 40 | touch 41 | umask 42 | which 43 | cp 44 | whereis 45 | mcopy 46 | mshowfat 47 | rhmask 48 | scp 49 | awk 50 | read 51 | updatedb -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/com_test/test.txt: -------------------------------------------------------------------------------- 1 | 这里 cat 命令用于读取指定文件的内容并打印到终端输出,后面会详细讲它的使用。 2 | 使用 cd 命令可以切换目录,在 Linux 里面使用。 3 | 你可以使用 ls -a 命令查看隐藏文件。 4 | 使用 pwd 获取当前路径。 5 | 使用 cp (copy)命令复制一个文件或目录到指定目录。 6 | 如果直接使用 cp 命令,复制一个目录的话,会出现如下错误。 7 | 
使用 cat , tac 和 nl 命令查看文件。 8 | 当我们执行一个 shell 命令行时通常会自动打开三个标准文件。 9 | ps命令用于报告当前系统的进程状态。 10 | 可以搭配kill指令随时中断、删除不必要的程序。 11 | ps命令是最基本同时也是非常强大的进程查看命令 12 | ls命令能够列出当前目录下的所有内容。 13 | Fortune命令是在相同命令行中作为fortune cookies 14 | Linux "toilet"命令 15 | 它比figlet命令的效果更有艺术感。 -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/comd_detail.txt: -------------------------------------------------------------------------------- 1 | hg:::{'hg命令是程序mercurial程序的执行命令': 1, 'hg命令是通过python实现的': 1} 2 | import:::{'import命令的提升行为的本质是,import命令是编译阶段执行的,在代码运行之前': 1} 3 | ssh:::{'ssh命令是openssh套件中的客户端连接工具,可以给予ssh加密协议实现安全的远程登录服务器': 1} 4 | mvn:::{'找到之后添加环境变量,mvn命令是需要java_home这个环境变量的:': 1} 5 | host:::{'host命令是常用的分析域名查询工具,可以用来测试域名系统工作是否正常': 2, 'host命令是一个用于执行dns查找的简单实用程序': 1} 6 | fs:::{'fs命令是stata的外部命令,如果是第一次使用,需要先进行安装': 1} 7 | jps:::{'jps命令是显示正在运行的java程序,root用户使用时显示所有运行的java程序,普通用户只显示自己启动的正在运行的java程序,而ps显示所有的进程,包括java程序': 1} 8 | ping:::{'ping命令是基于icmp协议来工作的,「icmp」全称为internet控制报文协议(internetcontrolmessageprotocol)': 1, '我们知道,ping命令是基于icmp协议来实现的': 1, '默认情况下,ping命令是以64字节大小的数据包来测试网络连通性的,可以使用参数-s选项修改默认数据包大小': 1} 9 | adb:::{'针对移动端android的测试,adb命令是很重要的一个点,必须将常用的adb命令熟记于心,将会为android测试带来很大的方便,其中很多命令将会用于自动化测试的脚本当中': 1, '简单点讲,adb命令是adb这个程序自带的一些命令,而adbshell则是调用的android系统中的命令,这些android特有的命令都放在了android设备的system/bin目录下,例如我再命令行中敲这样一个命令:': 1, '强调:针对移动端android的测试,adb命令是很重要的一个点,必须将常用的adb命令熟记于心,将会为android测试带来很大的方便,其中很多命令将会用于自动化测试的脚本当中,且也会用到执行rf脚本的命令当中': 1} 10 | yarn:::{'yarn命令是调用bin/yarn脚本文件,如果运行yarn脚本没有带任何参数,则会打印yarn所有命令的描述': 1} 11 | flushall:::{'flushall命令是清除redis中的所有数据': 1} 12 | bcp:::{'bcp命令是sqlserver提供的一个快捷的数据导入导出工具': 5, 'bcp命令是sql': 1} 13 | distcp:::{'distcp命令是用于集群内部或者集群之间拷贝数据的常用命令': 1} 14 | net:::{'net命令是一个命令行命令,net命令有很多函数用于实用和核查计算机之间的netbios连接,可以查看我们的管理网络环境、服务、用户、登陆等信息内容;要想获得net的help可以(1)在windows下可以用图形的方式,开始->帮助->索引->输入net;(2)在command下可以用字符方式:net/?或net或nethelp取得相应的方法的帮助': 1, 'net命令是很多网络命令的集合,在windowsme/nt/2000内,很多网络功能都是以net命令为开始的,通过nethelp可以看到这些命令的详细介绍:': 1} 15 | 
explain:::{'explain命令是查看优化器如何决定执行查询的主要方法': 1, 'explain命令是查看查询优化器如何决定执行查询的主要办法': 1, 'explain命令是mysql自带的一个命令,用于解释mysql将如何处理sql,执行顺序和是否使用了索引之类,我们平常可以用于sql调优': 1, 'explain命令是查看查询优化器如何决定执行查询的主要方法': 2} 16 | nc:::{'nc命令是ncat的软链接': 1, 'nc命令是很好用的网络工具': 1} 17 | scsi:::{'scsi命令是在commanddescriptorblock(cdb)中定义的': 1} 18 | grunt:::{'grunt命令是npm下载的grunt插件的命名,而不是node本身的命令所以在cmd中无法直接执行': 1} 19 | copy:::{'copy命令是sql*plus命令,可用于在oracle数据库、非oracle数据库之间数据的传输': 1} 20 | sealert:::{'sealert命令是setroubleshoot客户端工具,也就是selinux信息诊断客户端工具': 1} 21 | dsh:::{'dsh命令是为了我们能统一管理机器不用一台机器一台机器的弄': 1} 22 | getline:::{'getline命令是我个人认为awk最强大的一个命令': 1} 23 | xcodebuild:::{'xcodebuild命令是xcodecommandlinetools的一部分': 2} 24 | if:::{'if命令是批处理程序中条件判断的命令,根据得出的判断结果,执行相对应的操作': 1} 25 | q:::{'q命令是说匹配到一定的行之后退出sed命令': 1} 26 | jar:::{'jar命令是和tar命令相对的,是linux终端进行压缩和解压的命令': 1} 27 | netstat:::{'netstat命令是我们定位网络相关问题的利器,如果你还不会使用,那么最好花几分钟学习一下': 1, 'netstat命令是一个监控tcp/ip网络的非常有用的工具,它可以显示路由表、实际的网络连接以及每一个网络接口设备的状态信息': 1} 28 | pinky:::{'pinky命令是一个用户信息查找命令,它提供了所有登录用户的详细信息': 1} 29 | gcp:::{'如果您厌倦了使用标准cp命令拷贝大文件时的盲目等待,gcp命令是个不错的选择': 1} 30 | put:::{'put命令是由生产者执行,该命令把新的任务添加到beanstalkd中': 1} 31 | mail:::{'mail命令是命令行的电子邮件发送和接收工具': 1} 32 | dir:::{'dir命令是directory的缩写,功能是显示一个磁盘上全部或部分文件目录(文件或文件夹),所显示的文件信息包括文件名、扩展名、文件长度、文件建立或最后一次修改的日期和时间等,不显示文件的具体内容': 1, '别着急,想看清楚还不简单,dir命令是可以带参数的,键入dir/p试试看,看,这先计算机在显示完一屏内容后,停了下来': 1} 33 | display:::{'display命令是网络维护和故障处理的重要工具': 1} 34 | serverless:::{'注意:虽然一个serverlesscomponent是一个npm模块,我们可以通过package.json中的main属性指定项目中任意的文件入口,但是如果没有serverless.js文件,serverless命令是没法通过component指定的本地路径调试的': 1} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/commad_test.txt: -------------------------------------------------------------------------------- 1 | cat 2 | chattr 3 | chmod 4 | chown 5 | cksum 6 | cmp 7 | diff 8 | diffstat 9 | file 10 | find 11 | git 12 | gitview 13 | indent 14 | cut 15 | ln 16 | less 17 | locate 18 | lsattr 19 | mattrib 20 | mc 21 | mdel 22 | mdir 23 | mktemp 24 
| more 25 | mmove 26 | mread 27 | mren 28 | mtools 29 | mtoolstest 30 | mv 31 | od 32 | paste 33 | patch 34 | rcp 35 | rm 36 | slocate 37 | split 38 | tee 39 | tmpwatch 40 | touch 41 | umask 42 | which 43 | cp 44 | whereis 45 | mcopy 46 | mshowfat 47 | rhmask 48 | scp 49 | awk 50 | read 51 | updatedb -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/command_model.txt: -------------------------------------------------------------------------------- 1 | {'(([a-zA-Z])*?)命令': 62, '命令(([a-zA-Z])*?)': 1, '(([a-zA-Z])*?)字段提取命令': 1, '(([a-zA-Z])*?)和grep命令': 1, '(([a-zA-Z])*?)分页显示 命令:ps -ef |less 实例3:查看命令': 1, '命令导航到标有特定标记的文本位置: ma - 使用 a 标记文本的当前位置 ‘a - 导航到标记 a 处 上面就是linux下(([a-zA-Z])*?)': 1, '命令非常类似的命令--(([a-zA-Z])*?)': 1, '命令历史使用记录并通过(([a-zA-Z])*?)': 2, '(([a-zA-Z])*?)查找命令': 1, '命令来创建(([a-zA-Z])*?)': 1, '命令:(([a-zA-Z])*?)': 1, '(([a-zA-Z])*?): 未找到命令': 1} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/command_model2.txt: -------------------------------------------------------------------------------- 1 | {'不要读取(([a-zA-Z])*?)的配置文件': 1, '就是默认的(([a-zA-Z])*?)参数了': 1, '作为运行(([a-zA-Z])*?)的确 省': 1, '使用的(([a-zA-Z])*?)参数': 1, '提示:(([a-zA-Z])*?)配置文件为': 1, '注意:(([a-zA-Z])*?)只允许间隔': 1, 'dev/sda2创建了一个(([a-zA-Z])*?)': 1, '同样的修改(([a-zA-Z])*?)': 1, '的是源目录(([a-zA-Z])*?)路径所占': 1, '但若使用了(([a-zA-Z])*?)时': 2, '但使用(([a-zA-Z])*?)可以随意浏览': 2, 'h-显示(([a-zA-Z])*?)的帮助文档': 2, '当使用(([a-zA-Z])*?)查看大文件': 2, '要退出(([a-zA-Z])*?)程序': 1, '使用(([a-zA-Z])*?)查找日志': 1, '终端键入 >(([a-zA-Z])*?)': 1, '进入(([a-zA-Z])*?)后': 1, '所以就想着是不是(([a-zA-Z])*?)要yum来': 1, '结果是nopackage(([a-zA-Z])*?)available': 1, '较差劲的是(([a-zA-Z])*?)所找到的': 1, '可以在使用(([a-zA-Z])*?)之前': 1, '也就是本地还没有(([a-zA-Z])*?)数据库': 1, '可执行(([a-zA-Z])*?)指令查询其': 1} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/entity_align/test_tech - 副本.txt: 
-------------------------------------------------------------------------------- 1 | java 2 | css 3 | html 4 | html5 5 | css3 6 | js 7 | javascript 8 | IE 9 | ASP 10 | Android 11 | Firefox 12 | iPhone 13 | Chrome 14 | IE8 15 | C# 16 | OAuth 17 | Squid 18 | Neo4j 19 | mongoDB 20 | NoSQL 21 | GitHub 22 | ESB 23 | Ruby 24 | DSLs 25 | Groovy 26 | F# 27 | Scala 28 | Azure 29 | GWT 30 | Git 31 | Github 32 | Splunk 33 | Mercurial 34 | TLB 35 | Powershell 36 | Selenium 37 | Deltacloud 38 | Vagrant 39 | jQuery 40 | Sonar 41 | Gradle 42 | VCS 43 | JRuby 44 | ATOM 45 | KVM 46 | AWS 47 | Heroku 48 | Tablet 49 | vFabric 50 | OpenStack 51 | Node 52 | GPGPU 53 | WS 54 | SASS 55 | SCSS 56 | LESS 57 | HAML 58 | Coffeescript 59 | Atom 60 | iPad 61 | Mobile Web 62 | RDFa 63 | RIA 64 | Jade 65 | NuGet 66 | Highcharts 67 | D3 68 | Pig 69 | SaaS 70 | Locust 71 | Rake 72 | Riak 73 | PaaS 74 | MongoDB 75 | PowerShell 76 | BigQuery 77 | Hadoop 78 | iBeacon 79 | CartoDB 80 | Docker 81 | Flyway 82 | Foreman 83 | GenyMotion 84 | Gulp 85 | Moco 86 | Packer 87 | Zipkin 88 | axios 89 | Pinpoint 90 | Dagger 91 | Dapper 92 | Elixir 93 | Enzyme 94 | Phoenix 95 | Elm 96 | GraphQL 97 | JuMP 98 | Rapidoid 99 | Recharts 100 | ReSwift -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/entity_align/test_tech.txt: -------------------------------------------------------------------------------- 1 | ASP 2 | Android 3 | Firefox 4 | iPhone 5 | Chrome 6 | IE8 7 | C# 8 | OAuth 9 | Squid 10 | Neo4j 11 | mongoDB 12 | NoSQL 13 | GitHub 14 | ESB 15 | Ruby 16 | DSLs 17 | Groovy 18 | F# 19 | Scala 20 | Azure 21 | GWT 22 | Git 23 | Github 24 | Splunk 25 | Mercurial 26 | TLB 27 | Powershell 28 | Selenium 29 | Deltacloud 30 | Vagrant 31 | jQuery 32 | Sonar 33 | Gradle 34 | VCS 35 | JRuby 36 | ATOM 37 | KVM 38 | AWS 39 | Heroku 40 | Tablet 41 | vFabric 42 | OpenStack 43 | Node 44 | GPGPU 45 | WS 46 | SASS 47 | SCSS 48 | LESS 49 | HAML 50 | Coffeescript 51 | Atom 52 | 
iPad 53 | Mobile Web 54 | RDFa 55 | RIA 56 | Jade 57 | NuGet 58 | Highcharts 59 | D3 60 | Pig 61 | SaaS 62 | Locust 63 | Rake 64 | Riak 65 | PaaS 66 | MongoDB 67 | PowerShell 68 | BigQuery 69 | Hadoop 70 | iBeacon 71 | CartoDB 72 | Docker 73 | Flyway 74 | Foreman 75 | GenyMotion 76 | Gulp 77 | Moco 78 | Packer 79 | Zipkin 80 | axios 81 | Pinpoint 82 | Dagger 83 | Dapper 84 | Elixir 85 | Enzyme 86 | Phoenix 87 | Elm 88 | GraphQL 89 | JuMP 90 | Rapidoid 91 | Recharts 92 | ReSwift -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/jieba_dic.txt: -------------------------------------------------------------------------------- 1 | 配置信息 3 n 2 | 配置文件 5 n 3 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/linux_serv_comd_dic.txt: -------------------------------------------------------------------------------- 1 | {'atd': {'at': 25, 'auditctl': 12}, 'dovecot': {'newaliases': 1, 'service': 1, 'useradd': 1}, 'haldaemon': {}, 'irda': {}, 'ntpd': {'ntpdate': 4, 'date': 1, 'service': 1}, 'portmap': {'pgrep': 1, 'kill': 1, 'svn': 2, 'showmount': 3, 'rpm': 1}, 'sshd': {}, 'vsftpd': {'ftp': 8, 'vi': 1, 'rpm': 2, 'useradd': 4, 'chroot': 2, 'ldd': 3}, 'xinetd': {'ntsysv': 1, 'service': 2, 'rpm': 1}, 'yum-updatesd': {}, 'acpid': {}, 'chargen-dgram': {}, 'cpuspeed': {}, 'daytime-dgram': {}, 'echo-stream': {}, 'gpm': {}, 'ip6tables': {}, 'nfs': {'mount': 14, 'umount': 5, 'df': 3, 'exportfs': 7, 'who': 1, 'showmount': 4, 'kill': 1, 'service': 2, 'hostnamectl': 2, 'ifconfig': 3}, 'sendmail': {'telnet': 4, 'newaliases': 4, 'sendmail': 1, 'cp': 3}, 'squid': {}, 'anacron': {'auditctl': 18}, 'autofs': {'mount': 2, 'cd': 3}, 'capi': {}, 'chargen-stream': {}, 'firstboot': {}, 'httpd': {'apachectl': 2, 'kill': 1, 'chkconfig': 1, 'service': 2, 'httpd': 2, 'wc': 1}, 'irqbalance': {}, 'bluetooth': {}, 'restorecond': {}, 'rpcidmapd': {}, 'alsasound': {}, 'auditd': {'auditctl': 11}, 'crond': 
{'crontab': 30, 'command': 2, 'at': 16, 'su': 4}, 'cvs': {'chgrp': 1, 'htpasswd': 6, 'cp': 5}, 'hplip': {}, 'isdn': {'type': 6}, 'lvm2-monitor': {}, 'mdmonitor': {}, 'messagebus': {}, 'mysqld': {'wget': 1, 'install': 1, 'hostname': 1}, 'nfslock': {}, 'rdisc': {}, 'readahead_early': {}, 'readahead_later': {}, 'apmd': {'at': 2}, 'avahi-daemon': {}, 'daytime-slream': {}, 'echo-dgram': {}, 'hidd': {}, 'iptables': {'iptables': 8, 'restore': 4}, 'kudzu': {}, 'mcstrans': {}, 'mdmpd': {}, 'microcode _ctl': {}, 'named': {'rndc': 1, 'tail': 1, 'ifconfig': 1, 'dig': 1, 'nslookup': 1}, 'netfs': {}, 'rpcgssd': {}, 'rsync': {'rsync': 29, 'rpm': 1, 'nohup': 2}, 'setroubleshoot': {}, 'smb': {'mount': 1, 'pdbedit': 1, 'testparm': 2, 'smbclient': 2, 'rpm': 1}, 'network': {'hostname': 1}, 'pcscd': {}, 'psacct': {}, 'smartd': {}, 'syslog': {'dmesg': 4, 'logger': 3}, 'xfs': {}, 'ypbind': {'chkconfig': 2}} -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/my_service_test.txt: -------------------------------------------------------------------------------- 1 | ssh 2 | subversion 3 | squid 4 | mongodb -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/serv_list.txt: -------------------------------------------------------------------------------- 1 | ['it', 'web', 'itil', 'sshd', 'dhcp', 'ecs', 'nginx', 'nfs', 'crond', 'apache', 'cache', 'saas', 'rsync', 'ip', 'jenkins', 'dns', 'cdn', 'paas', 'lambda', 'openvpn', 'lvs', 'keepalived', 'squid', 'snmpd', 'bastion', 'api', 'servicenow', 'osg', 'smtp', 'oos', 'trace', 'rds', 'ops','dns', 'samba', 'nginx', 'sendmail', 'nis', 'telnetd', 'rpcbind', 'xrdp', 'vnc', 'vsftpd', 'xdmcp', 'systemd', 'vncserver', 'inetd', 'httpd', 'mysqld', 'network', 'xinet', 'chkconfig', 'openoffice', 'redis', 'redhat', 'internet', 'nfsrpc', 'dhcpd', 'oracle', 'snmpd', 'mongdb', 'mongodb', 'xdm', 'rpm', 'postfix', 'crontab', 'sftp', 'service', 'ssh', 'nfs', 'linux', 
'ntp', 'rpc', 'apache', 'mysql', 'snmp', 'nagios', 'tftp', 'gdm', 'openssh', 'ntpd', 'iptables', 'ftp', 'tomcat', 'windows', 'xinetd', 'rstatd', 'php', 'httpd', 'telnet', 'vsftp', 'reloaduwsgi', 'dovecot', 'dns', 'samba', 'nginx', 'sendmail', 'nis', 'telnetd', 'rpcbind', 'xrdp', 'vnc', 'http', 'vsftpd', 'xdmcp', 'systemd', 'vncserver', 'inetd', 'httd', 'mysqld', 'network', 'xinet', 'chkconfig', 'openoffice', 'redis', 'redhat', 'internet', 'weblogic', 'nfsrpc', 'dhcpd', 'oracle', 'snmpd', 'xdm', 'rpm', 'aom', 'dhcp', 'sla', 'ecs', 'nginx', 'nfs', 'crond', 'apache', 'cache', 'httpdns', 'saas', 'rsync', 'ip', 'jenkins', 'dns', 'cdn', 'paas', 'lambda', 'openvpn', 'lvs', 'keepalived', 'squid', 'snmpd', 'bastion', 'api', 'servicenow', 'osg', 'smtp', 'trace', 'rds', 'ops'] -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/service.txt: -------------------------------------------------------------------------------- 1 | acpid 电源管理接口。如果是笔记本电脑用户,则建议开启,可以监听内核层的相关电源事件 开启 2 | anacron 系统的定时任务程序。是 cron 的一个子系统,如果定时任务错过了执行时间,则可以通过 anacron 继续唤醒执行 关闭 3 | alsasound alsa 声卡驱动。如果使用 alsa 声卡,则开启 关闭 4 | apmd 电源管理模块。如果支持 acpid,就不需要 apmd,可以关闭 关闭 5 | atd 指定系统在特定时间执行某个任务,只能执行一次。如果需要则开启,但我们一般使用 crond 来执行循环定时任务 关闭 6 | auditd 审核子系统。如果开启了此服务,那么 SELinux 的审核信息会写入 /var/log/audit/ audit.log 文件;如果不开启,那么审核信息会记录在 syslog 中 开启 7 | autofs 让服务器可以自动挂载网络中其他服务器的共享数据,一般用来自动挂载 NFS 服务。如果没有 NFS 服务,则建议关闭 关闭 8 | avahi-daemon avahi 是 zeroconf 协议的实现,它可以在没有 DNS 服务的局域网里发现基于 zeroconf 协议的设备和服务。除非有兼容设备或使用 zeroconf 协议,否则关闭 关闭 9 | bluetooth 蓝牙设备支持。一般不会在服务器上启用蓝牙设备,关闭它 关闭 10 | capi 仅对使用 ISND 设备的用户有用 关闭 11 | chargen-dgram 使用 UDP 协议的 chargen server。其主要提供类似远程打字的功能 关闭 12 | chargen-stream 同上 关闭 13 | cpuspeed 可以用来调整 CPU 的频率。当闲置时,可以自动降低 CPU 频率来节省电量 开启 14 | crond 系统的定时任务,一般的 Linux 服务器都需要定时任务来协助系统维护。建议开启 开启 15 | cvs 一个版本控制系统 关闭 16 | daytime-dgram 使用 TCP 协议的 daytime 守护进程,该协议为客户机实现从远程服务器获取日期和时间的功能 关闭 17 | daytime-slream 同上 关闭 18 | dovecot 邮件服务中 POP3/IMAP 
服务的守护进程,主要用来接收信件。如果启动了邮件服务则开启:否则关闭 关闭 19 | echo-dgram 服务器回显客户服务的进程 关闭 20 | echo-stream 同上 关闭 21 | firstboot 系统安装完成后,有一个欢迎界面,需要对系统进行初始设定,这就是这个服务的作用。既然不是第一次启动了,则建议关闭 关闭 22 | gpm 在字符终端 (ttyl~tty6) 中可以使用鼠标复制和粘贴,这就是这个服务的功能 开启 23 | haldaemon 检测和支持 USB 设备。如果是服务器则可以关闭,个人机则建议开启 关闭 24 | hidd 蓝牙鼠标、键盘等蓝牙设备检测。必须启动 bluetooth 服务 关闭 25 | hplip HP 打印机支持,如果没有 HP 打印机则关闭 关闭 26 | httpd apache 服务的守护进程。如果需要启动 apache,就开启 开启 27 | ip6tables IPv6 的防火墙。目前 IPv6 协议并没有使用,可以关闭 关闭 28 | iptables 防火墙功能。Linux 中的防火墙是内核支持功能。这是服务器的主要防护手段,必须开启 开启 29 | irda IrDA 提供红外线设备(笔记本电脑、PDA’s、手机、计算器等)间的通信支持。建议关闭 关闭 30 | irqbalance 支持多核处理器,让 CPU 可以自动分配系统中断(IRQ),提高系统性能。目前服务器多是多核 CPU,请开启 开启 31 | isdn 使用 ISDN 设备连接网络。目前主流的联网方式是光纤接入和 ADSL,ISDN 己经非常少见,请关闭 关闭 32 | kudzu 该服务可以在开机时进行硬件检测,并会调用相关的设置软件。建议关闭,仅在需要时开启 关闭 33 | lvm2-monitor 该服务可以让系统支持LVM逻辑卷组,如果分区采用的是LVM方式,那么应该开启。建议开启 开启 34 | mcstrans SELinux 的支持服务。建议开启 开启 35 | mdmonitor 该服务用来监测 Software RAID 或 LVM 的信息。不是必需服务,建议关闭 关闭 36 | mdmpd 该服务用来监测 Multi-Path 设备。不是必需服务,建议关闭 关闭 37 | messagebus 这是 Linux 的 IPC (Interprocess Communication,进程间通信)服务,用来在各个软件中交换信息。建议关闭 关闭 38 | microcode _ctl Intel 系列的 CPU 可以通过这个服务支持额外的微指令集。建议关闭 关闭 39 | mysqld MySQL 数据库服务器。如果需要就开启;否则关闭 开启 40 | named DNS 服务的守护进程,用来进行域名解析。如果是 DNS 服务器则开启;否则关闭 关闭 41 | netfs 该服务用于在系统启动时自动挂载网络中的共享文件空间,比如 NFS、Samba 等。 需要就开启,否则关闭 关闭 42 | network 提供网络设罝功能。通过这个服务来管理网络,建议开启 开启 43 | nfs NFS (Network File System) 服务,Linux 与 Linux 之间的文件共享服务。需要就开启,否则关闭 关闭 44 | nfslock 在 Linux 中如果使用了 NFS 服务,那么,为了避免同一个文件被不同的用户同时编辑,所以有这个锁服务。有 NFS 时开启,否则关闭 关闭 45 | ntpd 该服务可以通过互联网自动更新系统时间.使系统时间永远准确。需要则开启,但不是必需服务 关闭 46 | pcscd 智能卡检测服务,可以关闭 关闭 47 | portmap 用在远程过程调用 (RPC) 的服务,如果没有任何 RPC 服务,则可以关闭。主要是 NFS 和 NIS 服务需要 关闭 48 | psacct 该守护进程支持几个监控进程活动的工具 关闭 49 | rdisc 客户端 ICMP 路由协议 关闭 50 | readahead_early 在系统开启的时候,先将某些进程加载入内存整理,可以加快启动速度 关闭 51 | readahead_later 同上 关闭 52 | restorecond 用于给 SELinux 监测和重新加载正确的文件上下文。如果开启 SELinux,则需要开启 关闭 53 | rpcgssd 与 NFS 有关的客户端功能。如果没有 NFS 就关闭 关闭 54 | rpcidmapd 同上 关闭 55 | rsync 远程数据备份守护进程 关闭 56 | sendmail sendmail 邮件服务的守护进程。如果有邮件服务就开启;否则关闭 
关闭 57 | setroubleshoot 该服务用于将 SELinux 相关信息记录在日志 /var/log/messages 中。建议开启 开启 58 | smartd 该服务用于自动检测硬盘状态。建议开启 开启 59 | smb 网络服务 samba 的守护进程。可以让 Linux 和 Windows 之间共享数据。如果需要则开启 关闭 60 | squid 代理服务的守护进程。如果需要则开启:否则关闭 关闭 61 | sshd ssh 加密远程登录管理的服务。服务器的远程管理必须使用此服务,不要关闭 开启 62 | syslog 日志的守护进程 开启 63 | vsftpd vsftp 服务的守护进程。如果需要 FTP 服务则开启;否则关闭 关闭 64 | xfs 这是 X Window 的字体守护进程,为图形界面提供字体服务。如果不启动图形界面,就不用开启 关闭 65 | xinetd 超级守护进程。如果有依赖 xinetd 的服务,就必须开启 开启 66 | ypbind 为 NIS (网络信息系统)客户机激活 ypbind 服务进程 关闭 67 | yum-updatesd yum 的在线升级服务 关闭 68 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/service_comd_test.txt: -------------------------------------------------------------------------------- 1 | {'atd': {'at'}, 'dovecot': {}, 'haldaemon': {}, 'irda': {}, 'ntpd': {}, 'portmap': {}, 'sshd': {'ssh'}, 'vsftpd': {'ftp'}, 'xinetd': {}, 'yum-updatesd': {}, 'acpid': {}, 'chargen-dgram': {}, 'cpuspeed': {}, 'daytime-dgram': {}, 'echo-stream': {}, 'gpm': {}, 'ip6tables': {'ip6tables'}, 'nfs': {'mount', 'umount', 'exportfs', 'showmount'}, 'sendmail': {'newaliases', 'sendmail'}, 'squid': {'squid'}, 'anacron': {'anacron'}, 'autofs': {}, 'capi': {}, 'chargen-stream': {}, 'firstboot': {}, 'httpd': {'apachectl', 'httpd'}, 'irqbalance': {}, 'bluetooth': {'rfkill'}, 'restorecond': {}, 'rpcidmapd': {}, 'alsasound': {}, 'auditd': {'auditctl'}, 'crond': {'crontab'}, 'cvs': {}, 'hplip': {}, 'isdn': {}, 'lvm2-monitor': {}, 'mdmonitor': {}, 'messagebus': {}, 'mysqld': {'mysql'}, 'nfslock': {}, 'rdisc': {}, 'readahead_early': {}, 'readahead_later': {}, 'apmd': {'apmd'}, 'avahi-daemon': {}, 'daytime-slream': {}, 'echo-dgram': {}, 'hidd': {}, 'iptables': {'iptables'}, 'kudzu': {}, 'mcstrans': {}, 'mdmpd': {}, 'microcode _ctl': {}, 'named': {}, 'netfs': {}, 'rpcgssd': {}, 'rsync': {'rsync'}, 'setroubleshoot': {}, 'smb': {}, 'network': {}, 'pcscd': {}, 'psacct': {}, 'smartd': {}, 'syslog': {}, 'xfs': {}, 'ypbind': {}} 
-------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/tech_comd_dic.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/file/tech_comd_dic.txt -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/tech_comd_dic2.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/file/tech_comd_dic2.txt -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/tech_file_dic.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/file/tech_file_dic.txt -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/tech_server_dic.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/file/tech_server_dic.txt -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file/technology.txt: -------------------------------------------------------------------------------- 1 | BASIC 2 | Pascal 3 | D 4 | ECMAScript 5 | Perl 6 | IOS 7 | AppleScript 8 | Adobe Air 9 | windows phone 10 | VB 11 | Visual Basic .NET 12 | Visual C# 13 | ASP.NET 14 | J# 15 | Java VM 16 | Ceylon 17 | Jython 18 | Kawa 19 | Lua 20 | JSP 21 | ASP 22 | Shell 23 | sh 24 | csh 25 | ksh 26 | sed 27 | awk 28 | bash 29 | Tcl 30 | Tk 31 | VBScript 32 | Dart 33 | JScript 34 | 
ActionScript 35 | Ada 36 | Forth 37 | AAuto 38 | ALGOL 39 | Intel 40 | AT&T 41 | Curl 42 | SGML 43 | XML 44 | XSLT 45 | SSML 46 | XHTML 47 | DHTML 48 | XML Schema 49 | LINQ 50 | T SQL 51 | XQuery 52 | PLSQL 53 | Visual FoxPro 54 | XSL 55 | AutoCAD 56 | Mathematica -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/file_count.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | f=open("./serv_file_dic.txt","r+") 3 | out = 0 4 | try: 5 | content = eval(f.read()) 6 | for ele in content: 7 | if content[ele]: 8 | for com in content[ele]: 9 | out = out + 1 10 | else: 11 | out = out + 1 12 | finally: 13 | f.close() 14 | print(out) 15 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/get_neo4j_nodes.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | #登录neo4j 4 | graph = Graph("http://localhost:7474",auth=("neo4j","SSPKUsspku12345")) 5 | 6 | serv_nodes = graph.nodes.match("命令") 7 | 8 | for ele in serv_nodes: 9 | if ele['detail'] is "" or ele["detail"] is None: 10 | print(ele['name']) 11 | # print(ele) 12 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/hadoop_linux_command.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | def build_nodes(nodes_record): 5 | data = {"id": str(nodes_record.get('n').identity), 6 | "label": next(iter(nodes_record.get('n').labels))} 7 | data.update(nodes_record.get('n')) 8 | return data 9 | 10 | 11 | #登录neo4j 12 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 13 | 14 | graph2 = Graph("http://121.36.99.228:7474",auth=("neo4j","SSPKUsspku12345")) 15 | 16 | data = graph2.run("MATCH (m:command)-[r]->(n:command) where 
m.name='command' RETURN n").data() 17 | hadoop_node = graph.nodes.match("技术",name="hadoop").first() 18 | com_root = graph.nodes.match("命令",name="命令").first() 19 | for ele in data: 20 | new_data = build_nodes(ele) 21 | com_node = Node("命令", name=new_data['name'],detail=new_data['detail'],platform='hadoop',parameter=new_data['parameter']) 22 | ab = Relationship(com_root, "包含命令", com_node) 23 | ab2 = Relationship(hadoop_node, "技术有关的命令", com_node) 24 | graph.create(ab) 25 | graph.create(ab2) 26 | 27 | data2 = graph2.run("MATCH (m:component)-[r]->(n:command) where m.name='hdfs' RETURN n").data() 28 | hdfs_node = graph.nodes.match("技术",name="hdfs").first() 29 | for ele in data2: 30 | new_data2 = build_nodes(ele) 31 | name=new_data2['name'] 32 | node = graph.nodes.match("命令",name=name,platform='hadoop').first() 33 | ab2 = Relationship(hdfs_node, "技术有关的命令", node) 34 | graph.create(ab2) 35 | 36 | data3 = graph2.run("MATCH (m:component)-[r]->(n:command) where m.name='yarn' RETURN n").data() 37 | yarn_node = graph.nodes.match("技术",name="yarn").first() 38 | for ele in data3: 39 | new_data3 = build_nodes(ele) 40 | name=new_data3['name'] 41 | node = graph.nodes.match("命令",name=name,platform='hadoop').first() 42 | ab2 = Relationship(yarn_node, "技术有关的命令", node) 43 | graph.create(ab2) 44 | 45 | data3 = graph2.run("MATCH (m:component)-[r]->(n:command) where m.name='mapreduce' RETURN n").data() 46 | mapreduce_node = graph.nodes.match("技术",name="mapreduce").first() 47 | for ele in data3: 48 | new_data3 = build_nodes(ele) 49 | name=new_data3['name'] 50 | node = graph.nodes.match("命令",name=name,platform='hadoop').first() 51 | ab2 = Relationship(mapreduce_node, "技术有关的命令", node) 52 | graph.create(ab2) -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/hadoop_linux_component.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | def 
build_nodes(nodes_record): 5 | data = {"id": str(nodes_record.get('n').identity), 6 | "label": next(iter(nodes_record.get('n').labels))} 7 | data.update(nodes_record.get('n')) 8 | return data 9 | 10 | #登录neo4j 11 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 12 | 13 | graph2 = Graph("http://121.36.99.228:7474",auth=("neo4j","SSPKUsspku12345")) 14 | 15 | data = graph2.run("MATCH (m:component)-[r]->(n:component) where m.name='component' RETURN n").data() 16 | 17 | hadoop_node = graph.nodes.match("技术",name="hadoop").first() 18 | for ele in data: 19 | new_data = build_nodes(ele) 20 | component_node = graph.nodes.match("技术",name=new_data['name']).first() 21 | if component_node: 22 | component_node['platform']='hadoop' 23 | component_node['detail'] = new_data['detail'] 24 | ab2 = Relationship(hadoop_node, "相关组件", component_node) 25 | graph.create(ab2) 26 | else: 27 | com_node = Node("技术", name=new_data['name'],detail=new_data['detail'],platform='hadoop') 28 | ab2 = Relationship(hadoop_node, "相关组件", com_node) 29 | graph.create(ab2) 30 | 31 | data = graph2.run("MATCH (m:tools)-[r]->(n:tools) where m.name='tools' RETURN n").data() 32 | for ele in data: 33 | new_data = build_nodes(ele) 34 | component_node = graph.nodes.match("技术",name=new_data['name']).first() 35 | if component_node: 36 | component_node['platform']='hadoop' 37 | component_node['detail'] = new_data['detail'] 38 | ab2 = Relationship(hadoop_node, "相关组件", component_node) 39 | graph.create(ab2) 40 | else: 41 | com_node = Node("技术", name=new_data['name'],detail=new_data['detail'],platform='hadoop') 42 | ab2 = Relationship(hadoop_node, "相关组件", com_node) 43 | graph.create(ab2) -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/hadoop_linux_file.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | def build_nodes(nodes_record): 5 | data = {"id": 
str(nodes_record.get('n').identity), 6 | "label": next(iter(nodes_record.get('n').labels))} 7 | data.update(nodes_record.get('n')) 8 | return data 9 | 10 | #登录neo4j 11 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 12 | 13 | graph2 = Graph("http://121.36.99.228:7474",auth=("neo4j","SSPKUsspku12345")) 14 | 15 | data = graph2.run("MATCH (m:configuration_file)-[r]->(n:configuration_file) where m.name='configuration_file' RETURN n").data() 16 | 17 | hadoop_node = graph.nodes.match("技术",name="hadoop").first() 18 | yarn_node = graph.nodes.match("技术",name="yarn").first() 19 | 20 | for ele in data: 21 | new_data = build_nodes(ele) 22 | # print(new_data['name']) 23 | com_node = Node("配置文件", name=new_data['name'],detail=new_data['detail'],platform='hadoop',path=new_data['path']) 24 | ab2 = Relationship(hadoop_node, "技术有关配置文件", com_node) 25 | graph.create(ab2) 26 | 27 | data2 = graph2.run("MATCH (m:component)-[r]->(n:configuration_file) where m.name='yarn' RETURN n").data() 28 | 29 | for ele in data2: 30 | new_data2 = build_nodes(ele) 31 | com_node2 = Node("配置文件", name=new_data2['name'],detail=new_data2['detail'],platform='hadoop',path=new_data2['path']) 32 | ab2 = Relationship(yarn_node, "技术有关配置文件", com_node2) 33 | graph.create(ab2) -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/language2neo.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | #构造技术概念 3 | from urllib import request 4 | from urllib.request import build_opener,ProxyHandler 5 | import requests 6 | import random 7 | import jieba 8 | import re 9 | from bs4 import BeautifulSoup 10 | from py2neo import Graph,Node,Relationship 11 | 12 | # User_Agent列表 13 | user_agent_list = [ 14 | "Mozilla/5.0(Macintosh;IntelMacOSX10.6;rv:2.0.1)Gecko/20100101Firefox/4.0.1", 15 | "Mozilla/4.0(compatible;MSIE6.0;WindowsNT5.1)", 16 | "Opera/9.80(WindowsNT6.1;U;en)Presto/2.8.131Version/11.11", 17 | 
"Mozilla/5.0(Macintosh;IntelMacOSX10_7_0)AppleWebKit/535.11(KHTML,likeGecko)Chrome/17.0.963.56Safari/535.11", 18 | "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1)", 19 | "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1;Trident/4.0;SE2.XMetaSr1.0;SE2.XMetaSr1.0;.NETCLR2.0.50727;SE2.XMetaSr1.0)" 20 | ] 21 | 22 | # 产生一个随机User-Agent 23 | headers ={ 24 | 'User-Agent':random.choice(user_agent_list), 25 | 'Connection': 'close' 26 | } 27 | 28 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 29 | 30 | root_node = graph.nodes.match("语言和框架",name="语言和框架").first() 31 | 32 | #爬取每个技术的属性(包含爬虫) 33 | def tech_detail(url,tech_name): 34 | # print(url2) 35 | req2 = requests.get(url,headers=headers) 36 | req2.encoding = 'utf8' 37 | # 发送请求 38 | soup2 = BeautifulSoup(req2.text, 'html.parser') 39 | index = soup2.select('.baikeLogo') 40 | #创建图谱节点 41 | node=Node("语言和框架", name=tech_name) 42 | if not index: 43 | name = soup2.select('.basic-info dt') 44 | val = soup2.select('.basic-info dd') 45 | i = 0 46 | while i < len(name): 47 | if name[i].get_text().replace('\xa0',''): 48 | v = val[i].select('a') 49 | if v: 50 | if v[0].get_text().replace('\xa0',''): 51 | # print(name[i].get_text()+"::"+v[0].get_text()) 52 | node[name[i].get_text().replace('\xa0','')] = v[0].get_text() 53 | else: 54 | # print((name[i].get_text()+"::"+val[i].get_text().replace("\n",""))) 55 | node[name[i].get_text().replace('\xa0','')]=val[i].get_text().replace("\n","") 56 | else: 57 | node[name[i].get_text().replace('\xa0','')] = val[i].get_text().replace("\n","") 58 | # print(name[i].get_text()+"::"+str(val[i])) 59 | i = i + 1 60 | # print(node) 61 | ab = Relationship(root_node, "包含语言或框架", node) 62 | graph.create(ab) 63 | 64 | # node = graph.nodes.match("技术",name="java").first() 65 | 66 | # if node: 67 | # print(node) 68 | 69 | # tech_detail("https://baike.baidu.com/item/git","git") 70 | # tech_detail("https://baike.baidu.com/item/mysql","mysql") 71 | 72 | 73 | 74 | 
f=open("./file/Languages.txt","r",encoding='utf8') 75 | try: 76 | content = f.read() 77 | tech_list = content.split('\n') 78 | for ele in tech_list: 79 | ele = ele.strip().lower() 80 | 81 | node = graph.nodes.match("语言和框架",name=ele).first() 82 | if not node: 83 | tech_detail("https://baike.baidu.com/item/"+str(ele.replace(" ","%20")),str(ele)) 84 | 85 | finally: 86 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/linux_serv_comd.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | #此脚本式为linux中的命令和服务建立关系(当前neo4j中只命令和服务概念,只有Linux相关知识) 3 | from bs4 import BeautifulSoup 4 | from py2neo import Graph,Node,Relationship 5 | 6 | def create_or_fail(start_node_name, end_node_name): 7 | r="MATCH (n:`服务`)-[r]-(m:`命令`) where n.name='"+start_node_name+"' and m.name='"+end_node_name+"' RETURN type(r)" 8 | a=graph.run(r) 9 | if a: 10 | return True 11 | else: 12 | return False 13 | 14 | #登录neo4j 15 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 16 | 17 | f = open("./file/linux_serv_comd_dic.txt") 18 | try: 19 | content = eval(f.read()) 20 | for ele in content: 21 | if content[ele]: 22 | # print(ele) 23 | serv_node = graph.nodes.match("服务",name=str(ele)).first() 24 | # print(serv) 25 | for comd in content[ele]: 26 | if content[ele][comd] - 6 > 0: 27 | comd_node = graph.nodes.match("命令",name=str(comd)).first() 28 | # print(comd) 29 | a = create_or_fail(str(ele),str(comd)) 30 | print(str(ele)+":::"+str(comd)+":::"+str(a)) 31 | if a: 32 | ab = Relationship(serv_node, "服务使用命令", comd_node) 33 | graph.create(ab) 34 | finally: 35 | f.close() 36 | 37 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/linux_serv_file2neo.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | #此脚本式为linux中的配置文件和服务建立关系(当前neo4j中只命令和服务概念,只有Linux相关知识) 3 | from bs4 import 
# encoding=utf8
def edit(str1, str2):
    """Minimum edit (Levenshtein) distance between two strings, computed
    with the classic dynamic-programming table."""
    rows = len(str1) + 1
    cols = len(str2) + 1
    # dist[i][j] = distance between str1[:i] and str2[:j]; the first row
    # and column are the costs of building a prefix from the empty string.
    dist = [[i + j for j in range(cols)] for i in range(rows)]
    for i in range(1, rows):
        for j in range(1, cols):
            substitution = 0 if str1[i - 1] == str2[j - 1] else 1
            dist[i][j] = min(
                dist[i - 1][j] + 1,                 # deletion
                dist[i][j - 1] + 1,                 # insertion
                dist[i - 1][j - 1] + substitution,  # substitute / match
            )
    return dist[len(str1)][len(str2)]
def add2dic(tag,dic):
    """Increment the occurrence counter for *tag* in *dic*, in place.

    tag: keyword extracted from a crawled page.
    dic: tag -> count mapping shared across all crawled pages.
    """
    # dict.get with a default replaces the explicit membership test.
    dic[tag] = dic.get(tag, 0) + 1
'https://baike.baidu.com/item/'+tech 64 | a = get_article_url(url,ele,dic) 65 | if a == 1: 66 | continue 67 | else: 68 | i = i + 1 69 | else: 70 | break 71 | finally: 72 | f.close() 73 | for w in sorted(dic, key=dic.get, reverse=True): 74 | print (w+":::"+str(dic[w])) 75 | # get_article_url('https://baike.baidu.com/item/Incremental%20data%20warehousing','java',dic) 76 | # get_article_url('https://baike.baidu.com/item/java','java',dic) 77 | # print(dic) 78 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/readme.doc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/readme.doc -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/readme.md: -------------------------------------------------------------------------------- 1 | # 图谱构建 2 | *** 3 | 本文图谱通过编写python脚本的方式抽取知识,在本文件加下是python脚本,在file文件夹下的是知识整理。 4 | 有很多python脚本文件,大部分是没啥实际作用的,有的是我做实验的脚本,最后没有用在项目里,有的是处理数据存入neo4j的脚本。 5 | 脚本 6 | 以下是部分处理脚本,有些没有在我这一版图谱中用上,但是不代表它们未来不会用上,目前这一版知识图谱是为了完成知识介绍任务构造的 7 | *** 8 | (废弃)Scrapy_test文件夹及里面的文件 9 | 这个文件夹下的程序都没有实际作用,是我做爬虫实验用的 10 | 11 | (废弃)agreement.py是把协议实体写入数据库的脚本 12 | 协议实体在后期被去掉了 13 | 14 | Change_node_param.py与Change_node_param2.py是统一知识图谱中命令参数格式 15 | 16 | comd_detail2neo.py有些命令没有介绍,把用模板抽取的介绍录入neo4j 17 | 18 | (实验)entity_data.py把实体的属性信息录入mysql 19 | 这个实验需要通过统计定义实体属性,统计python代码在另一个脚本,最终没有利用 20 | 21 | (实验)entity_solid.py根据实体属性计算实体对齐,成对匹配 22 | 这个计算过程是基于entity_data.py把实体属性录入mysql后做的,没有被利用 23 | 24 | (测试)file_count.py计算爬虫中文件的数量,没啥用,当初需要有文件总数 25 | 26 | get_neo4j_nodes.py获取neo4j中没有介绍的命令实体名称,并打印出来 27 | 这个脚本是想得到没有介绍属性或是介绍属性的value为空的命令实体,修改某个字段可以变成获取服务的实体,或是获取文件的实体,这一脚本就够用了 28 | 29 | hadoop_linux_command.py这个脚本是录入hadoop,hdfs,yarn,mapreduce相关命令 30 | 当时有一个外网的服务器,从那个服务器上读取数据,录入到本地的服务器上 31 | 32 | hadoop_linux_component.py这个脚本是录入组件信息的,和上一个脚本一样 33 | 34 | 
hadoop_linux_file.py这个脚本是录入配置文件信息的,和上一个脚本一样 35 | 36 | (测试)jiebatest.py用来写分词测试的脚本,基本上跟jieba分词的测试都先在这个脚本上本地测试一下 37 | 38 | KG_introduce.py根据规则模板爬取实体介绍 39 | 这个脚本会把爬取的数据用log的方式存入serv_detail.log,人工处理一下格式,在通过comd_detail2neo.py录入neo4j 40 | 41 | (实验,废弃)language2neo.py爬取语言&框架实体的百科消息盒子信息,主要是做属性存入neo4j中 42 | 43 | (改)linux_serv_comd.py为linux中的命令和服务建立关系,爬取的内容在linux_serv_comd_dic.txt,在另一个脚本中,优化了抽取规则方法 44 | 45 | (改)linux_serv_file.py这个脚本用来爬取服务与配置文件的关系,并对关系个数做统计,结果存入serv_file_dic.txt,在另一个脚本中,优化了抽取规则方法 46 | 47 | linux_serv_file2neo.py为linux中的配置文件和服务建立关系,存入neo4j 48 | 49 | (改)linux_service_commad.py抽取服务与命令的关系,在另一个脚本中会对抽取规则做改动 50 | 51 | min_edit.py最小编辑距离 52 | 53 | (废弃)new_entity_ex.py实体抽取,没啥用 54 | 55 | (实验)qulity_ex.py根据统计学原理,分析实体属性。最后没用上,是一个方向吧 56 | 获取某一类实体的百科页面,统计百科页面中出现次数最多的名词,过滤这些名词,选择可以成为属性的名词。 57 | 58 | serv_commd_predict.py设定阈值,确定测试集中准确率最高是阈值是多少。 59 | 这个是用来预测关系抽取中阈值设置的,只需修改测试集和参数,就可以确定不同阈值 60 | 61 | serv_detail2neo.py服务的详情录入neo4j。 62 | 有些服务实体没有详情,把服务详情录入neo4j,应该会有另一个脚本爬取详情的信息。 63 | 64 | service.py服务实体信息录入neo4j。 65 | 66 | service_comd_file.py这个是最终版本,抽取服务和命令、配置文件的关系,由于是根据技术抽取脚本改的,里面很多变量名称还是看着不太对劲。 67 | 68 | service_relation2neo.py服务与命令,服务于配置文件,录入neo4j 69 | 70 | (改)service_spider.py服务关系爬取脚本,比较早的版本 71 | 72 | (废弃)snap_test.py没啥用,忘了这个是干啥的了 73 | 74 | Solid_alignment.py局部集体对齐算法,这个脚本改一些变量就可以对齐其他的实体,只考虑实体名称 75 | 76 | (实验)tech_atr.py这个脚本是根据统计获取的实体属性,抽取属性值,最后没用上,感觉是一个方向吧,反正我这一版没有用上 77 | 78 | tech_comd_dic.py技术实体关系抽取。最终版 79 | 80 | tech_relation2neo4j.py技术实体关系录入neo4j 81 | 82 | (实验)tech_similar_count.py忘了这个要干啥了,好像是处理实体属性的 83 | 84 | tech2neo.py把技术实体录入neo4j,录入过程中爬取了百科消息盒子作为属性 85 | 86 | (实验)translate2.py翻译插件实验 87 | 88 | (废弃)windows_serv2neo4j.py想录入windows的服务,最后没用上 89 | 90 | (实验)words_similar.py句子相似度实验,TF-IDF 91 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__init__.py -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__pycache__/settings.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/__pycache__/settings.cpython-37.pyc -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/items.py: -------------------------------------------------------------------------------- 1 | # Define here the models for your scraped items 2 | # 3 | # See documentation in: 4 | # https://docs.scrapy.org/en/latest/topics/items.html 5 | 6 | import scrapy 7 | 8 | 9 | class KgTestItem(scrapy.Item): 10 | # define the fields for your item here like: 11 | # name = scrapy.Field() 12 | pass 13 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/middlewares.py: -------------------------------------------------------------------------------- 1 | # Define here the models for your spider middleware 2 | # 3 | # See documentation in: 4 | # https://docs.scrapy.org/en/latest/topics/spider-middleware.html 5 | 6 | from scrapy import signals 7 | 8 | # useful for 
class KgTestSpiderMiddleware:
    """Spider middleware for the KG_test project.

    Every hook below is a pass-through: responses, results and start
    requests flow through unchanged.  Scrapy treats an undefined hook the
    same way — as if the middleware did not modify the passed objects.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy; wires spider_opened to the matching signal.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Runs for each response entering the spider; returning None lets
        # processing continue (raising an exception would abort it).
        return None

    def process_spider_output(self, response, result, spider):
        # Runs on the spider's results; must yield Request or item objects.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # Called when the spider (or another middleware's
        # process_spider_input) raises; None means "not handled here".
        return None

    def process_start_requests(self, start_requests, spider):
        # Like process_spider_output, but for the start requests (which
        # have no response attached); must yield only requests.
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
63 | 64 | @classmethod 65 | def from_crawler(cls, crawler): 66 | # This method is used by Scrapy to create your spiders. 67 | s = cls() 68 | crawler.signals.connect(s.spider_opened, signal=signals.spider_opened) 69 | return s 70 | 71 | def process_request(self, request, spider): 72 | # Called for each request that goes through the downloader 73 | # middleware. 74 | 75 | # Must either: 76 | # - return None: continue processing this request 77 | # - or return a Response object 78 | # - or return a Request object 79 | # - or raise IgnoreRequest: process_exception() methods of 80 | # installed downloader middleware will be called 81 | return None 82 | 83 | def process_response(self, request, response, spider): 84 | # Called with the response returned from the downloader. 85 | 86 | # Must either; 87 | # - return a Response object 88 | # - return a Request object 89 | # - or raise IgnoreRequest 90 | return response 91 | 92 | def process_exception(self, request, exception, spider): 93 | # Called when a download handler or a process_request() 94 | # (from other downloader middleware) raises an exception. 
class KgTestPipeline:
    """Item pipeline for the KG_test project.

    Currently a no-op stage: every scraped item is handed back to the
    engine unchanged.  Add cleaning / validation / persistence logic here
    and register the class in ITEM_PIPELINES to activate it.
    """

    def process_item(self, item, spider):
        # No transformation yet — return the item so later stages get it.
        return item
You can find more settings consulting the documentation: 5 | # 6 | # https://docs.scrapy.org/en/latest/topics/settings.html 7 | # https://docs.scrapy.org/en/latest/topics/downloader-middleware.html 8 | # https://docs.scrapy.org/en/latest/topics/spider-middleware.html 9 | 10 | BOT_NAME = 'KG_test' 11 | 12 | SPIDER_MODULES = ['KG_test.spiders'] 13 | NEWSPIDER_MODULE = 'KG_test.spiders' 14 | 15 | 16 | # Crawl responsibly by identifying yourself (and your website) on the user-agent 17 | #USER_AGENT = 'KG_test (+http://www.yourdomain.com)' 18 | 19 | # Obey robots.txt rules 20 | ROBOTSTXT_OBEY = True 21 | 22 | # Configure maximum concurrent requests performed by Scrapy (default: 16) 23 | #CONCURRENT_REQUESTS = 32 24 | 25 | # Configure a delay for requests for the same website (default: 0) 26 | # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay 27 | # See also autothrottle settings and docs 28 | #DOWNLOAD_DELAY = 3 29 | # The download delay setting will honor only one of: 30 | #CONCURRENT_REQUESTS_PER_DOMAIN = 16 31 | #CONCURRENT_REQUESTS_PER_IP = 16 32 | 33 | # Disable cookies (enabled by default) 34 | #COOKIES_ENABLED = False 35 | 36 | # Disable Telnet Console (enabled by default) 37 | #TELNETCONSOLE_ENABLED = False 38 | 39 | # Override the default request headers: 40 | #DEFAULT_REQUEST_HEADERS = { 41 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 42 | # 'Accept-Language': 'en', 43 | #} 44 | 45 | # Enable or disable spider middlewares 46 | # See https://docs.scrapy.org/en/latest/topics/spider-middleware.html 47 | #SPIDER_MIDDLEWARES = { 48 | # 'KG_test.middlewares.KgTestSpiderMiddleware': 543, 49 | #} 50 | 51 | # Enable or disable downloader middlewares 52 | # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html 53 | #DOWNLOADER_MIDDLEWARES = { 54 | # 'KG_test.middlewares.KgTestDownloaderMiddleware': 543, 55 | #} 56 | 57 | # Enable or disable extensions 58 | # See 
https://docs.scrapy.org/en/latest/topics/extensions.html 59 | #EXTENSIONS = { 60 | # 'scrapy.extensions.telnet.TelnetConsole': None, 61 | #} 62 | 63 | # Configure item pipelines 64 | # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html 65 | #ITEM_PIPELINES = { 66 | # 'KG_test.pipelines.KgTestPipeline': 300, 67 | #} 68 | 69 | # Enable and configure the AutoThrottle extension (disabled by default) 70 | # See https://docs.scrapy.org/en/latest/topics/autothrottle.html 71 | #AUTOTHROTTLE_ENABLED = True 72 | # The initial download delay 73 | #AUTOTHROTTLE_START_DELAY = 5 74 | # The maximum download delay to be set in case of high latencies 75 | #AUTOTHROTTLE_MAX_DELAY = 60 76 | # The average number of requests Scrapy should be sending in parallel to 77 | # each remote server 78 | #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 79 | # Enable showing throttling stats for every response received: 80 | #AUTOTHROTTLE_DEBUG = False 81 | 82 | # Enable and configure HTTP caching (disabled by default) 83 | # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings 84 | #HTTPCACHE_ENABLED = True 85 | #HTTPCACHE_EXPIRATION_SECS = 0 86 | #HTTPCACHE_DIR = 'httpcache' 87 | #HTTPCACHE_IGNORE_HTTP_CODES = [] 88 | #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage' 89 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/KG_spider.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import scrapy 3 | 4 | class KG_spider(scrapy.Spider): 5 | name="KG" 6 | start_urls = ["https://babelnet.org/synset?word=hadoop&lang=ZH&details=1&orig=hadoop"] 7 | def parse(self,response): 8 | print(response) -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/__init__.py: 
-------------------------------------------------------------------------------- 1 | # This package will contain the spiders of your Scrapy project 2 | # 3 | # Please refer to the documentation for information on how to create and manage 4 | # your spiders. 5 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/__pycache__/KG_spider.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/__pycache__/KG_spider.cpython-37.pyc -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/KG_test/spiders/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/scrapy_test/KG_test/scrapy.cfg: -------------------------------------------------------------------------------- 1 | # Automatically created by: scrapy startproject 2 | # 3 | # For more information about the [deploy] section see: 4 | # https://scrapyd.readthedocs.io/en/latest/deploy.html 5 | 6 | [settings] 7 | default = KG_test.settings 8 | 9 | [deploy] 10 | #url = http://localhost:6800/ 11 | project = KG_test 12 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/serv_commd_predict.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | #准备计算服务与命令关系的评价 3 | #准确率、召回率、F值 4 | import copy 5 | f=open("./file/serv_dic.txt","r+") 6 
| f2=open("./file/service_comd_test.txt","r+") 7 | 8 | cor_list=[] 9 | ret_list=[] 10 | f1_list=[] 11 | 12 | try: 13 | content = eval(f.read()) 14 | content2 = copy.deepcopy(content) 15 | test = eval(f2.read()) 16 | keng = 1 #阈值 17 | while keng < 10: 18 | out = 0 #提取出来的信息条数 19 | for ele in content: 20 | if content[ele]: 21 | for com in content[ele]: 22 | content[ele][com] = content[ele][com] - keng 23 | if content[ele][com] > 0: 24 | out = out + 1 25 | else: 26 | out = out + 1 27 | 28 | correct = 0#提取出来的信息正确的条数 29 | count = 0 #测试样本中信息的条数 30 | for ele in test: 31 | if test[ele]: 32 | for a in test[ele]: 33 | count = count + 1 34 | else: 35 | count = count + 1 36 | 37 | for ele in content: 38 | if content[ele]: 39 | for key,val in content[ele].items(): 40 | if val > 0 and key in test[ele]: 41 | correct = correct + 1 42 | else: 43 | if not test[ele]: 44 | correct = correct + 1 45 | #准确率:(正确预测信息条数/所有预测信息条数)反映了被分类器判定的正例中真正的正例样本的比重 46 | cor_per = round(correct*100/out,2) 47 | #召回率:(正确预测信息条数/所有正确信息条数(一般时测试集条数))反映了被正确判定的正例占总的正例的比重 48 | ret_per = round(correct*100/count,2) 49 | # 50 | f1_per = round(cor_per*ret_per*2/(cor_per+ret_per),2) 51 | cor_list.append(cor_per) 52 | ret_list.append(ret_per) 53 | f1_list.append(f1_per) 54 | content = copy.deepcopy(content2) 55 | keng = keng + 1 56 | 57 | 58 | print(cor_list) 59 | print(ret_list) 60 | print(f1_list) 61 | 62 | # print(content2) 63 | 64 | 65 | 66 | 67 | finally: 68 | f.close() 69 | f2.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/serv_detail2neo.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | f=open("./file/serv_detail.txt","r",encoding='utf8') 5 | 6 | #登录neo4j 7 | graph = Graph("http://localhost:7474",auth=("neo4j","SSPKUsspku12345")) 8 | 9 | try: 10 | content = f.read() 11 | relation_list = content.split('\n') 12 | # print(len(relation_list)) 13 | i = 
0 14 | for evey_list in relation_list: 15 | serv_list = evey_list.split(":::") 16 | name = serv_list[0] 17 | serv_dic = eval(serv_list[1]) 18 | serv_str = "" 19 | for k,v in serv_dic.items(): 20 | if k.startswith(name+"服务是") or k.startswith(name+"是"): 21 | if k == name+"是什么?": 22 | continue 23 | else: 24 | serv_str = serv_str+k if serv_str=="" else serv_str+"。"+k 25 | serv_node = graph.nodes.match("服务",name=name).first() 26 | serv_node['detail'] = serv_str 27 | graph.push(serv_node) 28 | 29 | 30 | 31 | finally: 32 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/service.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from bs4 import BeautifulSoup 3 | from py2neo import Graph,Node,Relationship 4 | 5 | #登录neo4j 6 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 7 | 8 | # a = Node("root", name="运维知识") 9 | # a = graph.nodes.match("root",name="运维知识").first() 10 | # # b1 = Node("命令", name="命令") 11 | # # ab1 = Relationship(a, "包含", b1) 12 | # # graph.create(ab1) 13 | 14 | # b2 = Node("服务", name="服务") 15 | # ab2 = Relationship(a, "包含", b2) 16 | # graph.create(ab2) 17 | 18 | root_node = graph.nodes.match("服务",name="服务").first() 19 | f=open("./service.txt","r",encoding="utf8") 20 | try: 21 | for line in f.readlines(): 22 | ele = line.split('\t') 23 | order_node=Node("服务", name=ele[0], detail=ele[1],platform="linux") 24 | ab = Relationship(root_node, "包含服务", order_node) 25 | graph.create(ab) 26 | finally: 27 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/service_relation2neo.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | #登录neo4j 5 | graph = Graph("http://localhost:7474",auth=("neo4j","SSPKUsspku12345")) 6 | 7 | 
# Load service->command and service->config-file relations extracted by
# service_comd_file.py and write them into Neo4j.  The input file repeats
# three lines per service: commands dict, config-files dict, separator.
f=open("./file/service_comd_file.txt","r",encoding='utf8')
command_root = graph.nodes.match("命令",name="命令").first()
serv_root = graph.nodes.match("服务",name="服务").first()
file_root = graph.nodes.match("配置文件",name="配置文件").first()
tech_root = graph.nodes.match("技术",name="技术").first()
com_temp = 0   # threshold: command co-occurrence count must exceed this
serv_temp = 0  # unused here; kept for symmetry with the tech script
file_temp = 0  # threshold for config-file co-occurrence counts

try:
    content = f.read()
    relation_list = content.split('\n')
    i = 0
    for evey_list in relation_list:
        # Line 0 of each 3-line group: "<service>:::{command: count, ...}"
        if (i%3) == 0:
            com_list = evey_list.split(":::")
            name = com_list[0]
            com_dic = eval(com_list[1])  # NOTE(review): eval on file content — trusted input only
            tech_node = graph.nodes.match("服务",name=name).first()
            if not tech_node:
                tech_node = Node("服务", name=name,platform='linux')
                # Bug fix: new 服务 nodes were attached to tech_root ("技术")
                # although the relation is "包含服务"; serv_root was defined
                # but never used.  Attach them to the 服务 root instead.
                aa = Relationship(serv_root, "包含服务", tech_node)
                graph.create(aa)

            for k,v in com_dic.items():
                if v > com_temp:
                    com_node = graph.nodes.match("命令",name=k,platform='linux').first()
                    print(name)
                    if com_node:
                        data =graph.run("match (n:`服务`)-[r:`服务使用命令`]->(m:`命令`) where n.name='"+name+"' and m.name='"+k+"' return r")
                        # Bug fix: a py2neo Cursor is always truthy, so the
                        # old `if not data:` never fired and existing command
                        # nodes never got linked; consume the cursor instead.
                        if not list(data):
                            ab = Relationship(tech_node, "服务使用命令", com_node)
                            graph.create(ab)
                    else:
                        com_node = Node("命令", name=k,platform='linux')
                        ab1 = Relationship(command_root, "包含命令", com_node)
                        graph.create(ab1)
                        ab2 = Relationship(tech_node, "服务使用命令", com_node)
                        graph.create(ab2)

        # Line 1 of each group: "<service>:::{config_file: count, ...}"
        elif (i%3) == 1:
            file_list = evey_list.split(":::")
            name = file_list[0]
            file_dic = eval(file_list[1])  # NOTE(review): eval — trusted input only
            tech_node = graph.nodes.match("服务",name=name).first()
            if not tech_node:
                tech_node = Node("服务", name=name,platform='linux')
                # Same root fix as above.
                aa = Relationship(serv_root, "包含服务", tech_node)
                graph.create(aa)
            for k,v in file_dic.items():
                # Bug fix: `k is not '/'` compared identity, not equality
                # (a SyntaxWarning on modern CPython); use != instead.
                if v > file_temp and "//" not in k and " " not in k and "www" not in k and k != '/' and k != '/.':
                    file_node = graph.nodes.match("配置文件",name=k).first()
                    print(name)
                    if file_node:
                        data =graph.run("match (n:`服务`)-[r:`服务使用配置文件`]->(m:`配置文件`) where n.name='"+name+"' and m.name='"+k+"' return r")
                        # Same cursor-truthiness fix as above.
                        if not list(data):
                            ad = Relationship(tech_node, "服务使用配置文件", file_node)
                            graph.create(ad)
                    else:
                        file_node = Node("配置文件", name=k, path=k,platform='linux')
                        ad1 = Relationship(file_root, "包含配置文件", file_node)
                        graph.create(ad1)
                        ad2 = Relationship(tech_node, "服务使用配置文件", file_node)
                        graph.create(ad2)
        else:
            # Line 2 of each group is a separator; just advance the index.
            i = i + 1
            continue
        i = i + 1

finally:
    f.close()
"/path/to/ssh_config"], 39 | 40 | //"preserve_modification_times": false, 41 | //"remote_time_offset_in_hours": 0, 42 | //"remote_encoding": "utf-8", 43 | //"remote_locale": "C", 44 | //"allow_config_upload": false, 45 | } 46 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/snap_test.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | 5 | 6 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 7 | com_node = graph.nodes.match("命令",name='snap').first() 8 | 9 | data =graph.run("match (n:`技术`)-[r:`技术有关的服务`]->(m:`服务`) where n.name='hadoop' and m.name='namanode' return r") 10 | if data: 11 | print(data) 12 | -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/tech2neo.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | #构造技术概念 3 | from urllib import request 4 | from urllib.request import build_opener,ProxyHandler 5 | import requests 6 | import random 7 | import jieba 8 | import re 9 | from bs4 import BeautifulSoup 10 | from py2neo import Graph,Node,Relationship 11 | 12 | # User_Agent列表 13 | user_agent_list = [ 14 | "Mozilla/5.0(Macintosh;IntelMacOSX10.6;rv:2.0.1)Gecko/20100101Firefox/4.0.1", 15 | "Mozilla/4.0(compatible;MSIE6.0;WindowsNT5.1)", 16 | "Opera/9.80(WindowsNT6.1;U;en)Presto/2.8.131Version/11.11", 17 | "Mozilla/5.0(Macintosh;IntelMacOSX10_7_0)AppleWebKit/535.11(KHTML,likeGecko)Chrome/17.0.963.56Safari/535.11", 18 | "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1)", 19 | "Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1;Trident/4.0;SE2.XMetaSr1.0;SE2.XMetaSr1.0;.NETCLR2.0.50727;SE2.XMetaSr1.0)" 20 | ] 21 | 22 | # 产生一个随机User-Agent 23 | headers ={ 24 | 'User-Agent':random.choice(user_agent_list), 25 | 'Connection': 'close' 26 | } 27 | 28 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 29 
| 30 | # Root node ("技术") that every crawled technology node hangs off.
root_node = graph.nodes.match("技术",name="技术").first()

# Crawl one Baidu-Baike page and store the technology as a graph node.
# url: Baike item URL; tech_name: entity name used as the node's `name`.
# The page's infobox (.basic-info dt/dd pairs) is copied onto the node as
# properties, then the node is linked to root_node via "包含技术".
def tech_detail(url,tech_name):
    # print(url2)
    req2 = requests.get(url,headers=headers)
    req2.encoding = 'utf8'
    # Send the request and parse the HTML.
    soup2 = BeautifulSoup(req2.text, 'html.parser')
    # .baikeLogo is used as a page marker; the infobox is parsed only
    # when it is absent.
    index = soup2.select('.baikeLogo')
    # Create the graph node for this technology.
    node=Node("技术", name=tech_name)
    if not index:
        # dt elements are property names, dd elements their values.
        # NOTE(review): assumes len(val) >= len(name) — confirm for all pages.
        name = soup2.select('.basic-info dt')
        val = soup2.select('.basic-info dd')
        i = 0
        while i < len(name):
            if name[i].get_text().replace('\xa0',''):
                v = val[i].select('a')
                if v:
                    if v[0].get_text().replace('\xa0',''):
                        # Prefer the text of the first link inside the value.
                        node[name[i].get_text().replace('\xa0','')] = v[0].get_text()
                    else:
                        node[name[i].get_text().replace('\xa0','')]=val[i].get_text().replace("\n","")
                else:
                    node[name[i].get_text().replace('\xa0','')] = val[i].get_text().replace("\n","")
            i = i + 1
    # NOTE(review): original indentation is ambiguous in this dump; the node
    # is linked to the root even when no infobox was parsed — confirm intent.
    ab = Relationship(root_node, "包含技术", node)
    graph.create(ab)

# node = graph.nodes.match("技术",name="java").first()

# if node:
#     print(node)

# tech_detail("https://baike.baidu.com/item/git","git")
# tech_detail("https://baike.baidu.com/item/mysql","mysql")

# Read the technology name list and crawl each entry not yet in the graph;
# spaces in names are URL-encoded as %20 for the Baike item URL.
f=open("./file/Technologies.txt","r",encoding='utf8')
try:
    content = f.read()
    tech_list = content.split('\n')
    for ele in tech_list:
        ele = ele.strip().lower()

        node = graph.nodes.match("技术",name=ele).first()
        if not node:
            tech_detail("https://baike.baidu.com/item/"+str(ele.replace(" ","%20")),str(ele))

finally:
    f.close()
-------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | #登录neo4j 5 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 6 | 7 | f=open("./file/abc.txt","r",encoding='utf8') 8 | command_root = graph.nodes.match("命令",name="命令").first() 9 | serv_root = graph.nodes.match("服务",name="服务").first() 10 | file_root = graph.nodes.match("配置文件",name="配置文件").first() 11 | tech_root = graph.nodes.match("技术",name="技术").first() 12 | com_temp = 1 13 | serv_temp = 1 14 | file_temp = 1 15 | 16 | try: 17 | content = f.read() 18 | relation_list = content.split('\n') 19 | # print(len(relation_list)) 20 | i = 0 21 | for evey_list in relation_list: 22 | # print(i%4) 23 | if (i%4) == 0: 24 | com_list = evey_list.split(":::") 25 | name = com_list[0] 26 | com_dic = eval(com_list[1]) 27 | tech_node = graph.nodes.match("技术",name=name).first() 28 | if not tech_node: 29 | tech_node = Node("技术", name=name,platform='linux') 30 | aa = Relationship(tech_root, "包含技术", tech_node) 31 | graph.create(aa) 32 | 33 | for k,v in com_dic.items(): 34 | if v > com_temp: 35 | com_node = graph.nodes.match("命令",name=k,platform='linux').first() 36 | print(name) 37 | # print(com_node) 38 | if com_node: 39 | data =graph.run("match (n:`技术`)-[r:`技术有关的命令`]->(m:`命令`) where n.name='"+name+"' and m.name='"+k+"' return r") 40 | if not data: 41 | ab = Relationship(tech_node, "技术使用命令", com_node) 42 | graph.create(ab) 43 | else: 44 | com_node = Node("命令", name=k,platform='linux') 45 | ab1 = Relationship(command_root, "包含命令", com_node) 46 | graph.create(ab1) 47 | ab2 = Relationship(tech_node, "技术有关的命令", com_node) 48 | graph.create(ab2) 49 | 50 | 51 | elif (i%4) == 1: 52 | serv_list = evey_list.split(":::") 53 | name = serv_list[0] 54 | serv_dic = eval(serv_list[1]) 55 | tech_node = graph.nodes.match("技术",name=name).first() 56 | if not tech_node: 57 | tech_node = Node("技术", name=name,platform='linux') 58 | aa = 
Relationship(tech_root, "包含技术", tech_node) 59 | graph.create(aa) 60 | for k,v in serv_dic.items(): 61 | if v > serv_temp: 62 | serv_node = graph.nodes.match("服务",name=k).first() 63 | print(name) 64 | if serv_node: 65 | data =graph.run("match (n:`技术`)-[r:`技术有关的服务`]->(m:`服务`) where n.name='"+name+"' and m.name='"+k+"' return r") 66 | if not data: 67 | ac = Relationship(tech_node, "技术有关的服务", serv_node) 68 | graph.create(ac) 69 | else: 70 | serv_node = Node("服务", name=k,platform='linux') 71 | ac1 = Relationship(serv_root, "包含服务", serv_node) 72 | graph.create(ac1) 73 | ac2 = Relationship(tech_node, "技术有关的服务", serv_node) 74 | graph.create(ac2) 75 | 76 | elif (i%4) == 2: 77 | file_list = evey_list.split(":::") 78 | name = file_list[0] 79 | # print(file_list[0]) 80 | file_dic = eval(file_list[1]) 81 | tech_node = graph.nodes.match("技术",name=name).first() 82 | if not tech_node: 83 | tech_node = Node("技术", name=name,platform='linux') 84 | aa = Relationship(tech_root, "包含技术", tech_node) 85 | graph.create(aa) 86 | for k,v in file_dic.items(): 87 | if v > file_temp and "//" not in k and " " not in k and "www" not in k and k is not '/' and k is not '/.': 88 | file_node = graph.nodes.match("配置文件",name=k).first() 89 | print(name) 90 | if file_node: 91 | data =graph.run("match (n:`技术`)-[r:`技术有关的配置文件`]->(m:`配置文件`) where n.name='"+name+"' and m.name='"+k+"' return r") 92 | if not data: 93 | ad = Relationship(tech_node, "技术有关的配置文件", file_node) 94 | graph.create(ad) 95 | else: 96 | file_node = Node("配置文件", name=k, path=k,platform='linux') 97 | ad1 = Relationship(file_root, "包含配置文件", file_node) 98 | graph.create(ad1) 99 | ad2 = Relationship(tech_node, "技术有关的配置文件", file_node) 100 | graph.create(ad2) 101 | else: 102 | i = i + 1 103 | continue 104 | i = i + 1 105 | 106 | finally: 107 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/tech_similar_count.py: 
-------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | f=open("./file/tech_atr123.txt",encoding="utf8") 3 | res={} 4 | try: 5 | content = f.read() 6 | con_list = content.split('\n') 7 | for num,ele in enumerate(con_list): 8 | if num%4 == 0: 9 | edition = eval(ele) 10 | ed_key = list(edition.keys())[0] 11 | ed_val = edition[ed_key] 12 | ed_val_key = list(ed_val.keys()) 13 | ed_strs="" 14 | for ele in ed_val_key: 15 | ed_strs = ed_strs + ele +',' 16 | # print("版本:"+ed_strs) 17 | res[ed_key] = {} 18 | res[ed_key]['版本']=ed_strs 19 | elif num%4 == 1: 20 | person = eval(ele) 21 | per_key = list(person.keys())[0] 22 | per_val = person[per_key] 23 | per_val_key = list(per_val.keys()) 24 | per_strs="" 25 | for ele in per_val_key: 26 | per_strs = per_strs + ele +',' 27 | # print("创始人:"+per_strs) 28 | res[ed_key]['创始人']=per_strs 29 | elif num%4 == 2: 30 | web = eval(ele) 31 | web_key = list(web.keys())[0] 32 | web_val = web[web_key] 33 | web_val_key = list(web_val.keys()) 34 | web_strs="" 35 | for ele in web_val_key: 36 | web_strs = web_strs + ele +',' 37 | # print("官网:"+web_strs) 38 | res[ed_key]['官网']=web_strs 39 | elif num%4 == 3: 40 | continue 41 | # print("==============") 42 | # element = eval(ele) 43 | print(str(res)) 44 | finally: 45 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/translate2.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | from translate import Translator 4 | translator= Translator(from_lang="chinese",to_lang="english") 5 | translation = translator.translate("在anconda3下找到Anaconda Prompt终端平台,输入pip install translate,这里的translate包是微软的,翻译良好。等待安装完成即可") 6 | print(translation) -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/windows_serv2neo4j.py: -------------------------------------------------------------------------------- 1 | # 
encoding=utf8 2 | from py2neo import Graph,Node,Relationship 3 | 4 | #登录neo4j 5 | graph = Graph("http://localhost:7474",auth=("neo4j","12345")) 6 | 7 | root_node = graph.nodes.match("服务",name="服务").first() 8 | 9 | f=open("./file/windows_serv.txt","r",encoding='utf8') 10 | try: 11 | content = f.read() 12 | serv_list = content.split('\n\n') 13 | # print(serv_list[0]) 14 | for serv in serv_list: 15 | item = serv.split('\n') 16 | for ele in item: 17 | ele2 = ele.split(':') 18 | print(ele2[0]) 19 | 20 | finally: 21 | f.close() -------------------------------------------------------------------------------- /图谱构建脚本/本地_爬虫+模板/words_similar.py: -------------------------------------------------------------------------------- 1 | # encoding=utf8 2 | from sklearn.feature_extraction.text import TfidfVectorizer 3 | import numpy as np 4 | from scipy.linalg import norm 5 | 6 | 7 | def tfidf_similarity(s1, s2): 8 | # 转化为TF矩阵 9 | cv = TfidfVectorizer(tokenizer=lambda s: s.split()) 10 | corpus = [s1, s2] 11 | vectors = cv.fit_transform(corpus).toarray() 12 | # 计算TF系数 13 | return np.dot(vectors[0], vectors[1]) / (norm(vectors[0]) * norm(vectors[1])) 14 | 15 | 16 | s1 = 'Run the job history server as an independent daemon.' 
17 | s2 = 'Start JobHistoryServer.Usage: mapred historyserver' 18 | print(tfidf_similarity(s1, s2)) -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/.gitattributes: -------------------------------------------------------------------------------- 1 | *.js linguist-language=java *.css linguist-language=java *.html linguist-language=java -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | !.mvn/wrapper/maven-wrapper.jar 3 | 4 | ### STS ### 5 | .apt_generated 6 | .classpath 7 | .factorypath 8 | .project 9 | .settings 10 | .springBeans 11 | .sts4-cache 12 | 13 | ### IntelliJ IDEA ### 14 | .idea 15 | *.iws 16 | *.iml 17 | *.ipr 18 | 19 | ### NetBeans ### 20 | /nbproject/private/ 21 | /build/ 22 | /nbbuild/ 23 | /dist/ 24 | /nbdist/ 25 | /.nb-gradle/ -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/knowledgegraph.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Navicat MySQL Data Transfer 3 | 4 | Source Server : 192.168.100.92 5 | Source Server Version : 80012 6 | Source Host : 192.168.100.92:3306 7 | Source Database : bd 8 | 9 | Target Server Type : MYSQL 10 | Target Server Version : 80012 11 | File Encoding : 65001 12 | 13 | Date: 2018-12-05 13:34:38 14 | */ 15 | 16 | SET FOREIGN_KEY_CHECKS=0; 17 | 18 | -- ---------------------------- 19 | -- Table structure for knowledgegraphdomain 20 | -- ---------------------------- 21 | DROP TABLE IF EXISTS `knowledgegraphdomain`; 22 | CREATE TABLE `knowledgegraphdomain` ( 23 | `id` int(11) NOT NULL AUTO_INCREMENT, 24 | `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, 25 | `nodecount` int(11) NOT NULL DEFAULT '0', 26 | `shipcount` int(11) NOT NULL, 27 | `status` int(11) NOT NULL, 28 | `createuser` varchar(255) 
DEFAULT NULL, 29 | PRIMARY KEY (`id`) 30 | ) ENGINE=InnoDB AUTO_INCREMENT=89 DEFAULT CHARSET=utf8; 31 | 32 | -- ---------------------------- 33 | -- Table structure for knowledgenodedetail 34 | -- ---------------------------- 35 | DROP TABLE IF EXISTS `knowledgenodedetail`; 36 | CREATE TABLE `knowledgenodedetail` ( 37 | `ID` int(11) NOT NULL AUTO_INCREMENT COMMENT '领域关系主键', 38 | `DomainId` int(11) DEFAULT NULL COMMENT '知识图谱领域主键', 39 | `NodeId` int(11) DEFAULT NULL COMMENT '关系定义主键', 40 | `Status` int(11) DEFAULT '1', 41 | `Content` text CHARACTER SET utf8 COLLATE utf8_general_ci, 42 | `CreateUser` varchar(255) DEFAULT NULL, 43 | `CreateTime` datetime DEFAULT NULL, 44 | `ModifyUser` varchar(255) DEFAULT NULL, 45 | `ModifyTime` datetime DEFAULT NULL, 46 | PRIMARY KEY (`ID`), 47 | KEY `domainid` (`DomainId`) USING BTREE, 48 | KEY `nodeid` (`NodeId`) USING BTREE 49 | ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8; 50 | 51 | -- ---------------------------- 52 | -- Table structure for knowledgenodedetailfile 53 | -- ---------------------------- 54 | DROP TABLE IF EXISTS `knowledgenodedetailfile`; 55 | CREATE TABLE `knowledgenodedetailfile` ( 56 | `ID` int(11) NOT NULL AUTO_INCREMENT COMMENT '领域关系主键', 57 | `DomainId` int(11) DEFAULT NULL COMMENT '知识图谱领域主键', 58 | `NodeId` int(11) DEFAULT NULL COMMENT '关系定义主键', 59 | `FileName` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '若是本地上传则为文件名称,若是网络链接则保存为链接', 60 | `ImageType` int(11) DEFAULT '0' COMMENT '0=本地上传,1=网络链接', 61 | `Status` int(11) DEFAULT '1', 62 | `CreateUser` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL, 63 | `CreateTime` datetime DEFAULT NULL, 64 | `ModifyUser` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL, 65 | `ModifyTime` datetime DEFAULT NULL, 66 | PRIMARY KEY (`ID`), 67 | KEY `domainid` (`DomainId`) USING BTREE, 68 | KEY `nodeid` (`NodeId`) USING BTREE 69 | ) ENGINE=InnoDB AUTO_INCREMENT=65 DEFAULT CHARSET=utf8; 70 | 
-------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/readme.md: -------------------------------------------------------------------------------- 1 | # 图谱知识查询应用 2 | ## 主要是知识定位接口与知识详情接口: 3 | ``` 4 | 项目用到的接口都在KGManageController.java文件中 5 | 6 | 知识定位接口:getEntityIndex 7 | 知识详情接口:getEntityByName 8 | 9 | 在application.yml中配置neo4j 10 | ``` -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/KgmakerApplication.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.context.annotation.ComponentScan; 6 | 7 | @SpringBootApplication 8 | @ComponentScan 9 | public class KgmakerApplication { 10 | 11 | public static void main(String[] args) { 12 | SpringApplication.run(KgmakerApplication.class, args); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/config/WebAppConfig.java: -------------------------------------------------------------------------------- 1 | 2 | package com.warmer.kgmaker.config; 3 | 4 | import javax.servlet.MultipartConfigElement; 5 | 6 | import org.springframework.beans.factory.annotation.Value; 7 | import org.springframework.boot.web.servlet.MultipartConfigFactory; 8 | import org.springframework.context.annotation.Bean; 9 | import org.springframework.context.annotation.Configuration; 10 | 11 | 12 | @Configuration 13 | public class WebAppConfig{ 14 | 15 | /** 16 | * 在配置文件中配置的文件保存路径 17 | */ 18 | @Value("${file.location}") 19 | private String location; 20 | @Value("${file.serverurl}") 21 | private String serverurl; 22 | 23 | public String getLocation() { 24 | return location; 25 | } 26 | 27 | public void setLocation(String 
location) { 28 | this.location = location; 29 | } 30 | 31 | @Bean 32 | public MultipartConfigElement multipartConfigElement(){ 33 | MultipartConfigFactory factory = new MultipartConfigFactory(); 34 | //文件最大KB,MB 35 | factory.setMaxFileSize("10MB"); 36 | //设置总上传数据总大小 37 | factory.setMaxRequestSize("100MB"); 38 | return factory.createMultipartConfig(); 39 | } 40 | 41 | /** 42 | * @return the serverurl 43 | */ 44 | public String getServerurl() { 45 | return serverurl; 46 | } 47 | 48 | /** 49 | * @param serverurl the serverurl to set 50 | */ 51 | public void setServerurl(String serverurl) { 52 | this.serverurl = serverurl; 53 | } 54 | } -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/controller/BaseController.java: -------------------------------------------------------------------------------- 1 | 2 | package com.warmer.kgmaker.controller; 3 | 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | public class BaseController { 8 | protected Logger log = LoggerFactory.getLogger(getClass()); 9 | } 10 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/controller/NLPController.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.controller; 2 | 3 | import com.hankcs.hanlp.HanLP; 4 | import com.hankcs.hanlp.seg.Segment; 5 | import com.hankcs.hanlp.seg.common.Term; 6 | import org.springframework.stereotype.Controller; 7 | import org.springframework.ui.Model; 8 | import org.springframework.web.bind.annotation.RequestMapping; 9 | import org.springframework.web.bind.annotation.RequestParam; 10 | import org.springframework.web.bind.annotation.ResponseBody; 11 | 12 | import java.util.ArrayList; 13 | import java.util.HashMap; 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | @Controller 18 | @RequestMapping("/kg") 19 | 
public class NLPController { 20 | 21 | @RequestMapping("/popse") 22 | public String popse(Model model) { 23 | return "kg/popse"; 24 | } 25 | /* 26 | * 关键字与其词性的map键值对集合 == 句子抽象 27 | */ 28 | @RequestMapping("/getnlpword") 29 | @ResponseBody 30 | public Map query(@RequestParam(value = "q") String question) throws Exception { 31 | Map result=new HashMap(); 32 | //分词 33 | List> resultMap=queryAbstract(question); 34 | result.put("code", 200); 35 | result.put("data", resultMap); 36 | return result; 37 | } 38 | 39 | 40 | 41 | public List> queryAbstract(String querySentence) { 42 | // 句子抽象化 43 | Segment segment = HanLP.newSegment().enableOffset(true).enableCustomDictionary(true); 44 | List terms = segment.seg(querySentence); 45 | List> sens=new ArrayList>(); 46 | for (Term term : terms) { 47 | HashMap abstractMap = new HashMap(); 48 | abstractMap.put("word", term.word); 49 | abstractMap.put("pos", term.offset); 50 | abstractMap.put("nature", term.nature.toString()); 51 | sens.add(abstractMap); 52 | } 53 | return sens; 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/controller/QuestionController.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.controller; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.ArrayList; 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | import java.util.Set; 10 | 11 | import org.springframework.web.bind.annotation.RequestMapping; 12 | import org.springframework.web.bind.annotation.RequestParam; 13 | import org.springframework.web.bind.annotation.RestController; 14 | 15 | import com.hankcs.hanlp.HanLP; 16 | import com.hankcs.hanlp.classification.classifiers.IClassifier; 17 | import com.hankcs.hanlp.classification.classifiers.NaiveBayesClassifier; 18 | import 
com.hankcs.hanlp.classification.models.NaiveBayesModel; 19 | import com.hankcs.hanlp.corpus.io.IOUtil; 20 | import com.hankcs.hanlp.seg.Segment; 21 | import com.hankcs.hanlp.seg.common.Term; 22 | import com.warmer.kgmaker.util.TestUtility; 23 | 24 | @RestController 25 | @RequestMapping("/kg") 26 | public class QuestionController { 27 | 28 | /** 29 | * 搜狗文本分类语料库5个类目,每个类目下1000篇文章,共计5000篇文章 30 | *//* 31 | public static final String CORPUS_FOLDER = TestUtility.ensureTestData("搜狗文本分类语料库迷你版", "http://hanlp.linrunsoft.com/release/corpus/sogou-text-classification-corpus-mini.zip"); 32 | *//** 33 | * 模型保存路径 34 | *//* 35 | public static final String MODEL_PATH = "data/test/classification-model.ser"; 36 | *//** 37 | * 关键字与其词性的map键值对集合 == 句子抽象 38 | *//* 39 | Map abstractMap; 40 | @RequestMapping("/query") 41 | public HashMap query(@RequestParam(value = "question") String question) throws Exception { 42 | HashMap resultMap = new HashMap(); 43 | IClassifier classifier = new NaiveBayesClassifier(trainOrLoadModel()); 44 | predict(classifier, question); 45 | //分词 46 | String words=queryAbstract(question); 47 | resultMap.put("domain", classifier.classify(question)); 48 | resultMap.put("words", words); 49 | return resultMap; 50 | } 51 | 52 | 53 | private static void predict(IClassifier classifier, String text) 54 | { 55 | System.out.printf("《%s》 属于分类 【%s】\n", text, classifier.classify(text)); 56 | } 57 | 58 | private static NaiveBayesModel trainOrLoadModel() throws IOException 59 | { 60 | NaiveBayesModel model = (NaiveBayesModel) IOUtil.readObjectFrom(MODEL_PATH); 61 | if (model != null) return model; 62 | 63 | File corpusFolder = new File(CORPUS_FOLDER); 64 | if (!corpusFolder.exists() || !corpusFolder.isDirectory()) 65 | { 66 | System.err.println("没有文本分类语料,请阅读IClassifier.train(java.lang.String)中定义的语料格式与语料下载:" + 67 | "https://github.com/hankcs/HanLP/wiki/%E6%96%87%E6%9C%AC%E5%88%86%E7%B1%BB%E4%B8%8E%E6%83%85%E6%84%9F%E5%88%86%E6%9E%90"); 68 | System.exit(1); 69 | } 70 | 71 | 
IClassifier classifier = new NaiveBayesClassifier(); // 创建分类器,更高级的功能请参考IClassifier的接口定义 72 | classifier.train(CORPUS_FOLDER); // 训练后的模型支持持久化,下次就不必训练了 73 | model = (NaiveBayesModel) classifier.getModel(); 74 | IOUtil.saveObjectTo(model, MODEL_PATH); 75 | return model; 76 | } 77 | public String queryAbstract(String querySentence) { 78 | 79 | // 句子抽象化 80 | Segment segment = HanLP.newSegment().enableCustomDictionary(true); 81 | List terms = segment.seg(querySentence); 82 | String abstractQuery = ""; 83 | abstractMap = new HashMap(); 84 | int nrCount = 0; //nr 人名词性这个 词语出现的频率 85 | for (Term term : terms) { 86 | String word = term.word; 87 | String termStr = term.toString(); 88 | System.out.println(termStr); 89 | if (termStr.contains("nm")) { //nm 电影名 90 | abstractQuery += "nm "; 91 | abstractMap.put("nm", word); 92 | } else if (termStr.contains("nr") && nrCount == 0) { //nr 人名 93 | abstractQuery += "nnt "; 94 | abstractMap.put("nnt", word); 95 | nrCount++; 96 | }else if (termStr.contains("nr") && nrCount == 1) { //nr 人名 再出现一次,改成nnr 97 | abstractQuery += "nnr "; 98 | abstractMap.put("nnr", word); 99 | nrCount++; 100 | }else if (termStr.contains("x")) { //x 评分 101 | abstractQuery += "x "; 102 | abstractMap.put("x", word); 103 | } else if (termStr.contains("ng")) { //ng 类型 104 | abstractQuery += "ng "; 105 | abstractMap.put("ng", word); 106 | } 107 | else { 108 | abstractQuery += word + " "; 109 | } 110 | } 111 | System.out.println("========HanLP分词结束========"); 112 | return abstractQuery; 113 | } 114 | 115 | public String queryExtenstion(String queryPattern) { 116 | // 句子还原 117 | Set set = abstractMap.keySet(); 118 | for (String key : set) { 119 | *//** 120 | * 如果句子模板中含有抽象的词性 121 | *//* 122 | if (queryPattern.contains(key)) { 123 | 124 | *//** 125 | * 则替换抽象词性为具体的值 126 | *//* 127 | String value = abstractMap.get(key); 128 | queryPattern = queryPattern.replace(key, value); 129 | } 130 | } 131 | String extendedQuery = queryPattern; 132 | *//** 133 | * 
当前句子处理完,抽象map清空释放空间并置空,等待下一个句子的处理 134 | *//* 135 | abstractMap.clear(); 136 | abstractMap = null; 137 | return extendedQuery; 138 | }*/ 139 | 140 | } 141 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/dal/IKGraphRepository.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.dal; 2 | 3 | import com.warmer.kgmaker.entity.QAEntityItem; 4 | import com.warmer.kgmaker.query.GraphQuery; 5 | import com.warmer.kgmaker.util.GraphPageRecord; 6 | 7 | import java.util.HashMap; 8 | import java.util.List; 9 | import java.util.Map; 10 | 11 | 12 | 13 | public interface IKGraphRepository { 14 | /** 15 | * 领域标签分页 16 | * @param queryItem 17 | * @return 18 | */ 19 | GraphPageRecord> getPageDomain(GraphQuery queryItem); 20 | /** 21 | * 删除Neo4j 标签 22 | * 23 | * @param domain 24 | */ 25 | void deleteKGdomain(String domain); 26 | 27 | /** 28 | * 查询图谱节点和关系 29 | * 30 | * @param query 31 | * @return node relationship 32 | */ 33 | HashMap getdomaingraph(GraphQuery query); 34 | 35 | /** 36 | * 获取节点列表 37 | * 38 | * @param domain 39 | * @param pageIndex 40 | * @param pageSize 41 | * @return 42 | */ 43 | HashMap getdomainnodes(String domain, Integer pageIndex, Integer pageSize); 44 | 45 | /** 46 | * 获取某个领域指定节点拥有的上下级的节点数 47 | * 48 | * @param domain 49 | * @param nodeid 50 | * @return long 数值 51 | */ 52 | long getrelationnodecount(String domain, long nodeid); 53 | 54 | /** 55 | * 创建领域,默认创建一个新的节点,给节点附上默认属性 56 | * 57 | * @param domain 58 | */ 59 | void createdomain(String domain); 60 | 61 | /** 62 | * 获取/展开更多节点,找到和该节点有关系的节点 63 | * 64 | * @param domain 65 | * @param nodeid 66 | * @return 67 | */ 68 | HashMap getmorerelationnode(String domain, String nodeid); 69 | 70 | /** 71 | * 更新节点名称 72 | * 73 | * @param domain 74 | * @param nodeid 75 | * @param nodename 76 | * @return 修改后的节点 77 | */ 78 | HashMap updatenodename(String domain, String nodeid, String 
nodename); 79 | 80 | /** 81 | * 创建单个节点 82 | * 83 | * @param domain 84 | * @param entity 85 | * @return 86 | */ 87 | HashMap createnode(String domain, QAEntityItem entity); 88 | 89 | /** 90 | * 批量创建节点和关系 91 | * 92 | * @param domain 93 | * 领域 94 | * @param sourcename 95 | * 源节点 96 | * @param relation 97 | * 关系 98 | * @param targetnames 99 | * 目标节点数组 100 | * @return 101 | */ 102 | HashMap batchcreatenode(String domain, String sourcename, String relation, String[] targetnames); 103 | 104 | /** 105 | * 批量创建下级节点 106 | * 107 | * @param domain 108 | * 领域 109 | * @param sourceid 110 | * 源节点id 111 | * @param entitytype 112 | * 节点类型 113 | * @param targetnames 114 | * 目标节点名称数组 115 | * @param relation 116 | * 关系 117 | * @return 118 | */ 119 | HashMap batchcreatechildnode(String domain, String sourceid, Integer entitytype, 120 | String[] targetnames, String relation); 121 | 122 | /** 123 | * 批量创建同级节点 124 | * 125 | * @param domain 126 | * 领域 127 | * @param entitytype 128 | * 节点类型 129 | * @param sourcenames 130 | * 节点名称 131 | * @return 132 | */ 133 | List> batchcreatesamenode(String domain, Integer entitytype, String[] sourcenames); 134 | 135 | /** 136 | * 添加关系 137 | * 138 | * @param domain 139 | * 领域 140 | * @param sourceid 141 | * 源节点id 142 | * @param targetid 143 | * 目标节点id 144 | * @param ship 145 | * 关系 146 | * @return 147 | */ 148 | HashMap createlink(String domain, long sourceid, long targetid, String ship); 149 | 150 | /** 151 | * 更新关系 152 | * 153 | * @param domain 154 | * 领域 155 | * @param shipid 156 | * 关系id 157 | * @param shipname 158 | * 关系名称 159 | * @return 160 | */ 161 | HashMap updatelink(String domain, long shipid, String shipname); 162 | 163 | /** 164 | * 删除节点(先删除关系再删除节点) 165 | * 166 | * @param domain 167 | * @param nodeid 168 | * @return 169 | */ 170 | List> deletenode(String domain, long nodeid); 171 | 172 | /** 173 | * 删除关系 174 | * 175 | * @param domain 176 | * @param shipid 177 | */ 178 | void deletelink(String domain, long shipid); 179 | 180 | /** 181 | * 
段落识别出的三元组生成图谱 182 | * 183 | * @param domain 184 | * @param entitytype 185 | * @param operatetype 186 | * @param sourceid 187 | * @param rss 188 | * 关系三元组 189 | * [[startname;ship;endname],[startname1;ship1;endname1],[startname2;ship2;endname2]] 190 | * @return node relationship 191 | */ 192 | HashMap createGraphByText(String domain, Integer entitytype, Integer operatetype, Integer sourceid, 193 | String[] rss); 194 | /** 195 | * 批量创建节点,关系 196 | * @param domain 197 | * @param params 三元组 sourcenode,relationship,targetnode 198 | */ 199 | void batchcreateGraph(String domain, List> params); 200 | /** 201 | * 更新节点有无附件 202 | * @param domain 203 | * @param nodeId 204 | * @param status 205 | */ 206 | void updateNodeFileStatus(String domain,long nodeId, int status); 207 | /** 208 | * 导入csv 209 | * @param domain 210 | * @param csvUrl 211 | * @param status 212 | */ 213 | void batchInsertByCSV(String domain, String csvUrl, int status) ; 214 | void updateCorrdOfNode(String domain, String uuid, Double fx, Double fy); 215 | } 216 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/dal/IKnowledgegraphRepository.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.dal; 2 | 3 | import org.apache.ibatis.annotations.Mapper; 4 | import org.apache.ibatis.annotations.Param; 5 | 6 | import java.util.List; 7 | import java.util.Map; 8 | 9 | @Mapper 10 | public interface IKnowledgegraphRepository { 11 | List> getDomains(); 12 | List> getDomainList(@Param("domainname")String domainname,@Param("createuser")String createuser); 13 | List> getRelationshipList(@Param("domainid")Integer domainid,@Param("relationtype")Integer relationtype,@Param("shipname")String shipname); 14 | void saveDomain(@Param("params") Map map); 15 | void updateDomain(@Param("params") Map map); 16 | void deleteDomain(@Param("id") Integer id); 17 | List> 
getDomainByName(@Param("domainname") String domainname); 18 | List> getDomainById(@Param("domainid")Integer domainid); 19 | 20 | void saveNodeImage(@Param("maplist") List> mapList); 21 | void saveNodeContent(@Param("params") Map map); 22 | void updateNodeContent(@Param("params") Map map); 23 | List> getNodeImageList(@Param("domainid") Integer domainid,@Param("nodeid") Integer nodeid); 24 | List> getNodeContent(@Param("domainid") Integer domainid,@Param("nodeid") Integer nodeid); 25 | void deleteNodeImage(@Param("domainid") Integer domainid,@Param("nodeid") Integer nodeid); 26 | } 27 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/entity/QAEntityItem.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.entity; 2 | 3 | import java.io.Serializable; 4 | 5 | @SuppressWarnings("serial") 6 | public class QAEntityItem implements Serializable{ 7 | private long uuid; 8 | private String name;//显示名称 9 | private String color;//对应关系数据库字段 10 | private Integer r; 11 | /*private String x; 12 | private String y;*/ 13 | public long getUuid() { 14 | return uuid; 15 | } 16 | public void setUuid(long uuid) { 17 | this.uuid = uuid; 18 | } 19 | public String getName() { 20 | return name; 21 | } 22 | public void setName(String name) { 23 | this.name = name; 24 | } 25 | public String getColor() { 26 | return color; 27 | } 28 | public void setColor(String color) { 29 | this.color = color; 30 | } 31 | public Integer getR() { 32 | return r; 33 | } 34 | public void setR(Integer r) { 35 | this.r = r; 36 | } 37 | /*public String getX() { 38 | return x; 39 | } 40 | public void setX(String x) { 41 | this.x = x; 42 | } 43 | public String getY() { 44 | return y; 45 | } 46 | public void setY(String y) { 47 | this.y = y; 48 | }*/ 49 | 50 | } 51 | -------------------------------------------------------------------------------- 
/基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/query/GraphQuery.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.query; 2 | 3 | public class GraphQuery{ 4 | 5 | private int domainid; 6 | private String domain; 7 | private String nodename; 8 | private String[] relation; 9 | private int matchtype ; 10 | private int pageSize = 10; 11 | private int pageIndex = 1; 12 | /** 13 | * @return the pageSize 14 | */ 15 | public int getPageSize() { 16 | return pageSize; 17 | } 18 | /** 19 | * @param pageSize the pageSize to set 20 | */ 21 | public void setPageSize(int pageSize) { 22 | this.pageSize = pageSize; 23 | } 24 | /** 25 | * @return the pageIndex 26 | */ 27 | public int getPageIndex() { 28 | return pageIndex; 29 | } 30 | /** 31 | * @param pageIndex the pageIndex to set 32 | */ 33 | public void setPageIndex(int pageIndex) { 34 | this.pageIndex = pageIndex; 35 | } 36 | /** 37 | * @return the domain 38 | */ 39 | public String getDomain() { 40 | return domain; 41 | } 42 | /** 43 | * @param domain the domain to set 44 | */ 45 | public void setDomain(String domain) { 46 | this.domain = domain; 47 | } 48 | /** 49 | * @return the matchtype 50 | */ 51 | public int getMatchtype() { 52 | return matchtype; 53 | } 54 | /** 55 | * @param matchtype the matchtype to set 56 | */ 57 | public void setMatchtype(int matchtype) { 58 | this.matchtype = matchtype; 59 | } 60 | /** 61 | * @return the nodename 62 | */ 63 | public String getNodename() { 64 | return nodename; 65 | } 66 | /** 67 | * @param nodename the nodename to set 68 | */ 69 | public void setNodename(String nodename) { 70 | this.nodename = nodename; 71 | } 72 | /** 73 | * @return the relation 74 | */ 75 | public String[] getRelation() { 76 | return relation; 77 | } 78 | /** 79 | * @param relation the relation to set 80 | */ 81 | public void setRelation(String[] relation) { 82 | this.relation = relation; 83 | } 84 | /** 85 | * @return the domainid 86 | */ 87 | 
/**
 * Service contract for the relational (MySQL/MyBatis) side of the app:
 * domain metadata plus per-node images and rich-text content.
 *
 * NOTE(review): the generic type arguments were stripped in the checked-in
 * text ("List>", "Map"); they are reconstructed here as
 * List&lt;Map&lt;String, Object&gt;&gt; / Map&lt;String, Object&gt;, matching how the
 * implementing service and the MyBatis mapper pass rows around — confirm
 * against IKnowledgegraphRepository.
 */
public interface IKnowledgegraphService {

	/** All domain rows. */
	List<Map<String, Object>> getDomains();

	/** Domain rows filtered by name and creating user. */
	List<Map<String, Object>> getDomainList(String domainname, String createuser);

	/** Inserts a new domain row. */
	void saveDomain(Map<String, Object> map);

	/** Updates an existing domain row. */
	void updateDomain(Map<String, Object> map);

	/** Soft-deletes the domain with the given id. */
	void deleteDomain(Integer id);

	/** Looks a domain up by exact name. */
	List<Map<String, Object>> getDomainByName(String domainname);

	/** Looks a domain up by id. */
	List<Map<String, Object>> getDomainById(Integer domainid);

	/** Batch-inserts image attachments for a node. */
	void saveNodeImage(List<Map<String, Object>> mapList);

	/** Inserts the rich-text content of a node. */
	void saveNodeContent(Map<String, Object> map);

	/** Updates the rich-text content of a node. */
	void updateNodeContent(Map<String, Object> map);

	/** Image attachments of one node in one domain. */
	List<Map<String, Object>> getNodeImageList(Integer domainid, Integer nodeid);

	/** Rich-text content of one node in one domain. */
	List<Map<String, Object>> getNodeContent(Integer domainid, Integer nodeid);

	/** Soft-deletes all image attachments of a node. */
	void deleteNodeImage(Integer domainid, Integer nodeid);
}
org.springframework.beans.factory.annotation.Qualifier; 5 | import org.springframework.stereotype.Service; 6 | 7 | import com.warmer.kgmaker.dal.IKGraphRepository; 8 | import com.warmer.kgmaker.entity.QAEntityItem; 9 | import com.warmer.kgmaker.query.GraphQuery; 10 | import com.warmer.kgmaker.service.IKGGraphService; 11 | import com.warmer.kgmaker.util.GraphPageRecord; 12 | 13 | import java.util.HashMap; 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | @Service 18 | public class KGGraphService implements IKGGraphService { 19 | 20 | @Autowired 21 | @Qualifier("KGraphRepository") 22 | private IKGraphRepository kgRepository; 23 | 24 | @Override 25 | public GraphPageRecord> getPageDomain(GraphQuery queryItem) { 26 | return kgRepository.getPageDomain(queryItem); 27 | } 28 | @Override 29 | public void deleteKGdomain(String domain) { 30 | kgRepository.deleteKGdomain(domain); 31 | } 32 | 33 | @Override 34 | public HashMap getdomaingraph(GraphQuery query) { 35 | return kgRepository.getdomaingraph(query); 36 | } 37 | 38 | @Override 39 | public HashMap getdomainnodes(String domain, Integer pageIndex, Integer pageSize) { 40 | return kgRepository.getdomainnodes(domain, pageIndex, pageSize); 41 | } 42 | 43 | @Override 44 | public long getrelationnodecount(String domain, long nodeid) { 45 | return kgRepository.getrelationnodecount(domain, nodeid); 46 | } 47 | 48 | @Override 49 | public void createdomain(String domain) { 50 | kgRepository.createdomain(domain); 51 | } 52 | 53 | @Override 54 | public HashMap getmorerelationnode(String domain, String nodeid) { 55 | return kgRepository.getmorerelationnode(domain, nodeid); 56 | } 57 | 58 | @Override 59 | public HashMap updatenodename(String domain, String nodeid, String nodename) { 60 | return kgRepository.updatenodename(domain, nodeid, nodename); 61 | } 62 | 63 | @Override 64 | public HashMap createnode(String domain, QAEntityItem entity) { 65 | return kgRepository.createnode(domain, entity); 66 | } 67 | 68 | @Override 
69 | public HashMap batchcreatenode(String domain, String sourcename, String relation, 70 | String[] targetnames) { 71 | return kgRepository.batchcreatenode(domain, sourcename, relation, targetnames); 72 | } 73 | 74 | @Override 75 | public HashMap batchcreatechildnode(String domain, String sourceid, Integer entitytype, 76 | String[] targetnames, String relation) { 77 | return kgRepository.batchcreatechildnode(domain, sourceid, entitytype, targetnames, relation); 78 | } 79 | 80 | @Override 81 | public List> batchcreatesamenode(String domain, Integer entitytype, String[] sourcenames) { 82 | return kgRepository.batchcreatesamenode(domain, entitytype, sourcenames); 83 | } 84 | 85 | @Override 86 | public HashMap createlink(String domain, long sourceid, long targetid, String ship) { 87 | return kgRepository.createlink(domain, sourceid, targetid, ship); 88 | } 89 | 90 | @Override 91 | public HashMap updatelink(String domain, long shipid, String shipname) { 92 | return kgRepository.updatelink(domain, shipid, shipname); 93 | } 94 | 95 | @Override 96 | public List> deletenode(String domain, long nodeid) { 97 | return kgRepository.deletenode(domain, nodeid); 98 | } 99 | 100 | @Override 101 | public void deletelink(String domain, long shipid) { 102 | kgRepository.deletelink(domain, shipid); 103 | } 104 | 105 | @Override 106 | public HashMap createGraphByText(String domain, Integer entitytype, Integer operatetype, 107 | Integer sourceid, String[] rss) { 108 | return kgRepository.createGraphByText(domain, entitytype, operatetype, sourceid, rss); 109 | } 110 | 111 | @Override 112 | public void batchcreateGraph(String domain, List> params) { 113 | kgRepository.batchcreateGraph(domain, params); 114 | } 115 | 116 | @Override 117 | public void updateNodeFileStatus(String domain, long nodeId, int status) { 118 | kgRepository.updateNodeFileStatus(domain,nodeId,status); 119 | } 120 | 121 | @Override 122 | public void updateCorrdOfNode(String domain, String uuid, Double fx, Double fy) { 
123 | kgRepository.updateCorrdOfNode(domain,uuid,fx,fy); 124 | } 125 | 126 | @Override 127 | public void batchInsertByCSV(String domain, String csvUrl, int status) { 128 | kgRepository.batchInsertByCSV(domain, csvUrl, status); 129 | } 130 | 131 | 132 | 133 | } 134 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/service/impl/KnowledgegraphService.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.service.impl; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.stereotype.Service; 8 | 9 | import com.warmer.kgmaker.dal.IKnowledgegraphRepository; 10 | import com.warmer.kgmaker.service.IKnowledgegraphService; 11 | 12 | @Service 13 | public class KnowledgegraphService implements IKnowledgegraphService { 14 | 15 | @Autowired 16 | private IKnowledgegraphRepository knowledgegraphRepository; 17 | 18 | @Override 19 | public List> getDomainList(String domainname,String createuser) { 20 | List> list = knowledgegraphRepository.getDomainList(domainname,createuser); 21 | return list; 22 | } 23 | 24 | @Override 25 | public void saveDomain(Map map) { 26 | knowledgegraphRepository.saveDomain(map); 27 | } 28 | 29 | @Override 30 | public void updateDomain(Map map) { 31 | knowledgegraphRepository.updateDomain(map); 32 | } 33 | 34 | @Override 35 | public void deleteDomain(Integer id) { 36 | knowledgegraphRepository.deleteDomain(id); 37 | } 38 | 39 | @Override 40 | public List> getDomainByName(String domainname) { 41 | return knowledgegraphRepository.getDomainByName(domainname); 42 | } 43 | 44 | @Override 45 | public List> getDomains() { 46 | return knowledgegraphRepository.getDomains(); 47 | } 48 | 49 | @Override 50 | public List> getDomainById(Integer domainid) { 51 | return knowledgegraphRepository.getDomainById(domainid); 52 | } 
53 | 54 | @Override 55 | public void saveNodeImage(List> mapList) { 56 | knowledgegraphRepository.saveNodeImage(mapList); 57 | } 58 | 59 | @Override 60 | public void saveNodeContent(Map map) { 61 | knowledgegraphRepository.saveNodeContent(map); 62 | } 63 | 64 | @Override 65 | public void updateNodeContent(Map map) { 66 | knowledgegraphRepository.updateNodeContent(map); 67 | } 68 | 69 | @Override 70 | public List> getNodeImageList(Integer domainid, Integer nodeid) { 71 | return knowledgegraphRepository.getNodeImageList(domainid,nodeid); 72 | } 73 | 74 | @Override 75 | public List> getNodeContent(Integer domainid, Integer nodeid) { 76 | return knowledgegraphRepository.getNodeContent(domainid,nodeid); 77 | } 78 | 79 | @Override 80 | public void deleteNodeImage(Integer domainid, Integer nodeid) { 81 | knowledgegraphRepository.deleteNodeImage(domainid,nodeid); 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/util/CSVUtil.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.util; 2 | 3 | import com.csvreader.CsvWriter; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.web.multipart.MultipartFile; 7 | 8 | import java.io.*; 9 | import java.nio.charset.Charset; 10 | import java.util.ArrayList; 11 | import java.util.Arrays; 12 | import java.util.List; 13 | 14 | /** 15 | * 16 | * @author ReverseG 17 | * 18 | */ 19 | 20 | public class CSVUtil { 21 | protected static Logger log = LoggerFactory.getLogger(CSVUtil.class); 22 | 23 | public static void createCsvFile(List> list, String path,String fileName) { 24 | String fileNm=path+fileName; 25 | log.info("CSVUtil->createFile方法开始. 
" + fileNm); 26 | File dir=new File(path); 27 | if(!dir.exists()) 28 | dir.mkdirs(); 29 | CsvWriter csvWriter = new CsvWriter(fileNm, ',', Charset.forName("UTF-8")); 30 | int rowSize = list.size(); 31 | int colSize = list.get(0).size(); 32 | for (int i = 0; i < rowSize; i++) { 33 | List lst = list.get(i); 34 | String[] cntArr = new String[colSize]; 35 | for (int j = 0; j < colSize; j++) { 36 | cntArr[j] = lst.get(j); 37 | } 38 | try { 39 | csvWriter.writeRecord(cntArr); 40 | } catch (IOException e) { 41 | log.error("CSVUtil->createFile: 文件输出异常" + e.getMessage()); 42 | } 43 | } 44 | 45 | csvWriter.close(); 46 | 47 | log.info("CSVUtil->createFile方法结束. " + fileNm); 48 | 49 | } 50 | 51 | public static List> readCsvFile(MultipartFile file) { 52 | log.info("CSVUtil->readCsvFile方法开始. "); 53 | try { 54 | List> rowList = new ArrayList>(); 55 | try { 56 | String charset = "utf-8"; 57 | BufferedReader reader = new BufferedReader(new InputStreamReader(file.getInputStream(),charset)); 58 | String line = null; 59 | while ((line = reader.readLine()) != null) { 60 | String[] rowArr = line.split(",");//CSV格式文件为逗号分隔符文件,这里根据逗号切分 61 | List row = Arrays.asList(rowArr); 62 | rowList.add(row); 63 | } 64 | } catch (FileNotFoundException e) { 65 | e.printStackTrace(); 66 | } 67 | return rowList; 68 | } catch (Exception e) { 69 | e.printStackTrace(); 70 | } 71 | log.info("CSVUtil->readCsvFile方法结束. "); 72 | return null; 73 | } 74 | 75 | public static List readCsvHead(MultipartFile file) { 76 | log.info("CSVUtil->readCsvFile方法开始. 
"); 77 | try { 78 | List rowList = new ArrayList<>(); 79 | try { 80 | String charset = "utf-8"; 81 | BufferedReader reader = new BufferedReader(new InputStreamReader(file.getInputStream(),charset)); 82 | String line = null; 83 | if ((line = reader.readLine()) != null) { 84 | String[] rowArr = line.split(",");//CSV格式文件为逗号分隔符文件,这里根据逗号切分 85 | rowList = Arrays.asList(rowArr); 86 | } 87 | } catch (FileNotFoundException e) { 88 | e.printStackTrace(); 89 | } 90 | return rowList; 91 | } catch (Exception e) { 92 | e.printStackTrace(); 93 | } 94 | log.info("CSVUtil->readCsvHead方法结束. "); 95 | return null; 96 | } 97 | 98 | public static void excelTocsv(String filePath) { 99 | log.info("CSVUtil->createFile方法开始. "); 100 | 101 | log.info("CSVUtil->createFile方法结束. "); 102 | 103 | } 104 | // public static void main(String[] args) throws IOException { 105 | // List> list = new ArrayList>(); 106 | // List lst = null; 107 | // lst = new ArrayList(); 108 | // for (int i=1; i<4; i++) { 109 | // lst.add("第" + i + "列"); 110 | // } 111 | // list.add(lst); 112 | // 113 | // for (int j=0; j<3; j++) { 114 | // lst = new ArrayList(); 115 | // for (int i=4; i<7; i++) { 116 | // lst.add(j + i + ""); 117 | // } 118 | // list.add(lst); 119 | // } 120 | // 121 | // createFile(list, "csvDemo.csv"); 122 | // } 123 | } 124 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/util/ExcelUtil.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.util; 2 | 3 | import org.apache.poi.openxml4j.opc.OPCPackage; 4 | 5 | import java.io.File; 6 | import java.io.IOException; 7 | import java.util.ArrayList; 8 | 9 | public class ExcelUtil { 10 | // *************xlsx文件读取函数************************ 11 | // 在jdbc.properties上加上 excelUrl:xlsx文件的目录 12 | // excel_name为文件名,arg为需要查询的列号(输入数字则返回对应列 , 输入字符串则固定返回这个字符串) 13 | // 返回 14 | @SuppressWarnings({ "resource", "unused" }) 15 | 
/**
 * Excel helpers. The xlsx reading pipeline is largely disabled (the
 * XLSX2CSV-based parsing was commented out upstream); only the extension
 * checks carry real behavior today.
 *
 * NOTE(review): generic type arguments were stripped in the checked-in text
 * ("ArrayList>"); reconstructed as ArrayList&lt;ArrayList&lt;String&gt;&gt; rows and
 * ArrayList&lt;Object&gt; args from how the loop below indexes them.
 */
public class ExcelUtil {

	/**
	 * Reads rows from an xlsx file and projects them through {@code args}:
	 * an Integer arg selects that column, a String arg emits the literal.
	 * Because the actual parsing is commented out, {@code excel_output}
	 * stays empty, so an existing file yields an empty result list.
	 *
	 * Cleanup vs. original: the unused OPCPackage local and its empty
	 * try/catch were removed (dead code — every statement inside was
	 * commented out).
	 *
	 * @return projected rows; null when the file is missing or an arg has
	 *         an unsupported type
	 */
	@SuppressWarnings("unused")
	public static ArrayList<ArrayList<String>> xlsx_reader(String excel_name, ArrayList<Object> args)
			throws IOException {
		// 读取excel文件夹url
		String excelUrl = "";
		File xlsxFile = new File(excelUrl + excel_name);
		if (!xlsxFile.exists()) {
			System.err.println("Not found or not a file: " + xlsxFile.getPath());
			return null;
		}
		// Parsing disabled upstream: excel_output remains empty.
		ArrayList<ArrayList<String>> excel_output = new ArrayList<ArrayList<String>>();

		System.out.println(excel_name + " 读取完毕");

		ArrayList<ArrayList<String>> ans = new ArrayList<ArrayList<String>>();
		// 对于每个sheet,读取其中的每一行
		for (int rowNum = 0; rowNum < excel_output.size(); rowNum++) {
			ArrayList<String> cur_output = excel_output.get(rowNum);
			ArrayList<String> curarr = new ArrayList<String>();
			for (int columnNum = 0; columnNum < args.size(); columnNum++) {
				Object obj = args.get(columnNum);
				if (obj instanceof String) {
					curarr.add(obj.toString());
				} else if (obj instanceof Integer) {
					curarr.add(cur_output.get((int) obj));
				} else {
					System.out.print("类型错误!");
					return null;
				}
			}
			ans.add(curarr);
		}
		return ans;
	}

	/** True when the path ends with .xls (case-insensitive). */
	public static boolean isExcel2003(String filePath) {
		return filePath.matches("^.+\\.(?i)(xls)$");
	}

	/** True when the path ends with .xlsx (case-insensitive). */
	public static boolean isExcel2007(String filePath) {
		return filePath.matches("^.+\\.(?i)(xlsx)$");
	}
}
/**
 * Result of a single file upload: the file's name, a status code,
 * an optional message and the URL where it can be reached.
 *
 * @author tc
 * @version V1.0
 */
public class FileResult {

	private String name;    // uploaded file name
	private int status;     // upload status code
	private String message; // human-readable message
	private String url;     // public URL of the stored file

	/** @return the uploaded file name */
	public String getName() {
		return this.name;
	}

	/** @param name the uploaded file name */
	public void setName(String name) {
		this.name = name;
	}

	/** @return the status code */
	public int getStatus() {
		return this.status;
	}

	/** @param status the status code */
	public void setStatus(int status) {
		this.status = status;
	}

	/** @return the message */
	public String getMessage() {
		return this.message;
	}

	/** @param message the message */
	public void setMessage(String message) {
		this.message = message;
	}

	/** @return the url */
	public String getUrl() {
		return this.url;
	}

	/** @param url the url to set */
	public void setUrl(String url) {
		this.url = url;
	}
}
/**
 * A page of search results: paging info plus the node list itself.
 *
 * Fix: the type parameter was stripped in the checked-in text
 * ("class GraphPageRecord implements Serializable" with a raw
 * {@code List nodeList}); restored here as {@code GraphPageRecord<T>}, the
 * form its callers (e.g. getPageDomain) use.
 *
 * @param <T> element type of the node list
 */
@SuppressWarnings("serial")
public class GraphPageRecord<T> implements Serializable {
	private int pageSize = 10;  // records per page, clamped to >= 1
	private int pageIndex = 1;  // 1-based page number, clamped to >= 1
	private int totalCount = 0; // total records across all pages
	private List<T> nodeList = new ArrayList<T>();

	/**
	 * 获取分页记录数量
	 * @return records per page
	 */
	public int getPageSize() {
		return pageSize;
	}

	/**
	 * 获取当前页序号
	 * @return the current (1-based) page index
	 */
	public int getPageIndex() {
		return pageIndex;
	}

	/**
	 * 设置当前页序号 — values below 1 are clamped to 1.
	 * @param pageIndex requested page index
	 */
	public void setPageIndex(int pageIndex) {
		if (pageIndex <= 0) {
			pageIndex = 1;
		}
		this.pageIndex = pageIndex;
	}

	/**
	 * Sets the page size — values below 1 are clamped to 1.
	 * @param pageSize requested page size
	 */
	public void setPageSize(int pageSize) {
		if (pageSize <= 0) {
			pageSize = 1;
		}
		this.pageSize = pageSize;
	}

	/**
	 * 获取总记录数
	 * @return total record count
	 */
	public int getTotalCount() {
		return totalCount;
	}

	/**
	 * 设置总记录数
	 * @param totalCount total record count
	 */
	public void setTotalCount(int totalCount) {
		this.totalCount = totalCount;
	}

	/**
	 * 获取Node检索结果列表
	 * @return the node list for this page
	 */
	public List<T> getNodeList() {
		return nodeList;
	}

	/** @param nodeList the node list for this page */
	public void setNodeList(List<T> nodeList) {
		this.nodeList = nodeList;
	}
}
multipartFile 21 | * @param path 文件保存绝对路径 22 | * @return 返回文件名 23 | * @throws IOException 24 | */ 25 | public static String saveImg(MultipartFile multipartFile,String path) throws IOException { 26 | File file = new File(path); 27 | if (!file.exists()) { 28 | file.mkdirs(); 29 | } 30 | FileInputStream fileInputStream = (FileInputStream) multipartFile.getInputStream(); 31 | String fileName = UuidUtil.getUUID() + ".png"; 32 | BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(path + File.separator + fileName)); 33 | byte[] bs = new byte[1024]; 34 | int len; 35 | while ((len = fileInputStream.read(bs)) != -1) { 36 | bos.write(bs, 0, len); 37 | } 38 | bos.flush(); 39 | bos.close(); 40 | return fileName; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/util/Neo4jConfig.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.util; 2 | 3 | import org.neo4j.driver.v1.AuthTokens; 4 | import org.neo4j.driver.v1.Driver; 5 | import org.neo4j.driver.v1.GraphDatabase; 6 | import org.springframework.beans.factory.annotation.Value; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | 10 | @Configuration 11 | public class Neo4jConfig { 12 | @Value("${spring.neo4j.url}") 13 | private String url; 14 | 15 | @Value("${spring.neo4j.username}") 16 | private String username; 17 | 18 | @Value("${spring.neo4j.password}") 19 | private String password; 20 | 21 | /** 22 | * 图数据库驱动模式 23 | * 24 | * @return 25 | */ 26 | 27 | @Bean 28 | public Driver neo4jDriver() { 29 | return GraphDatabase.driver(url, AuthTokens.basic(username, password)); 30 | } 31 | 32 | } -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/util/QiniuUploadService.java: 
/**
 * Minimal generic API response wrapper: status code, payload and message.
 *
 * Fix: the type parameter was stripped in the checked-in text
 * ("public class R {" with "public T data;" — T was undeclared, so the
 * class could not compile); restored as {@code R<T>}.
 *
 * @param <T> payload type
 */
public class R<T> {
	public int code;   // status code
	public T data;     // payload
	public String msg; // human-readable message

	/** @return the status code */
	public int getCode() {
		return this.code;
	}

	/** @param code the status code */
	public void setCode(int code) {
		this.code = code;
	}

	/** @return the payload */
	public T getData() {
		return data;
	}

	/** @param data the payload */
	public void setData(T data) {
		this.data = data;
	}

	/** @return the message */
	public String getMsg() {
		return this.msg;
	}

	/** @param msg the message */
	public void setMsg(String msg) {
		this.msg = msg;
	}
}
/**
 * Small string helpers: blank checks and javascript-style join.
 *
 * NOTE(review): the loop bodies of the join methods were destroyed in the
 * checked-in text (everything between each "&lt;" and the next "&gt;" was
 * eaten). They are reconstructed here as a standard separator-join
 * (separator between elements, none trailing) — confirm against callers.
 * ObjectUtils.toString(x) (commons-lang3) was replaced by the equivalent
 * stdlib Objects.toString(x, "") — both return "" for null.
 */
public class StringUtil {

	/*
	 * 是否为空字符串
	 * @param str candidate
	 * @return true when null, empty, or whitespace-only
	 */
	public static boolean isBlank(String str) {
		int strLen;
		if (str == null || (strLen = str.length()) == 0) {
			return true;
		}
		for (int i = 0; i < strLen; i++) {
			if (!Character.isWhitespace(str.charAt(i))) {
				return false;
			}
		}
		return true;
	}

	/** Negation of {@link #isBlank(String)}. */
	public static boolean isNotBlank(String str) {
		return !isBlank(str);
	}

	/**
	 * 连接方法 类似于javascript
	 * @param join 连接字符串
	 * @param strAry 需要连接的集合
	 * @return elements joined with the separator
	 */
	public static String join(String join, String[] strAry) {
		StringBuffer sb = new StringBuffer();
		for (int i = 0, len = strAry.length; i < len; i++) {
			if (i > 0) {
				sb.append(join);
			}
			sb.append(strAry[i]);
		}
		return sb.toString();
	}

	/**
	 * Joins one column of a list of row-maps.
	 * @param join separator
	 * @param cols row maps
	 * @param colName key to extract from each row (null values become "")
	 */
	public static String joinWithColName(String join, List<Map<String, Object>> cols, String colName) {
		List<String> aColCons = new ArrayList<String>();
		for (Map<String, Object> map : cols) {
			aColCons.add(Objects.toString(map.get(colName), ""));
		}
		return join(join, aColCons);
	}

	/** List overload of {@link #join(String, String[])}. */
	public static String join(String join, List<String> listStr) {
		StringBuffer sb = new StringBuffer();
		for (int i = 0, len = listStr.size(); i < len; i++) {
			if (i > 0) {
				sb.append(join);
			}
			sb.append(listStr.get(i));
		}
		return sb.toString();
	}
}
for (int i = 0; i < keywordArray.length; ++i) 58 | { 59 | counter = counts.get(keywordArray[i]); 60 | if (counter == null) 61 | { 62 | counter = 0; 63 | } 64 | counts.put(keywordArray[i], ++counter); //增加词频 65 | } 66 | 67 | return counts; 68 | } 69 | 70 | /** 71 | * 加载一个文件夹下的所有语料 72 | * 73 | * @param path 74 | * @return 75 | */ 76 | public static Map loadCorpus(String path) 77 | { 78 | Map dataSet = new TreeMap(); 79 | File root = new File(path); 80 | File[] folders = root.listFiles(); 81 | if (folders == null) return null; 82 | for (File folder : folders) 83 | { 84 | if (folder.isFile()) continue; 85 | File[] files = folder.listFiles(); 86 | if (files == null) continue; 87 | String[] documents = new String[files.length]; 88 | for (int i = 0; i < files.length; i++) 89 | { 90 | documents[i] = IOUtil.readTxt(files[i].getAbsolutePath()); 91 | } 92 | dataSet.put(folder.getName(), documents); 93 | } 94 | 95 | return dataSet; 96 | } 97 | 98 | /** 99 | * 加载一个文件夹下的所有语料 100 | * 101 | * @param folderPath 102 | * @return 103 | */ 104 | public static Map loadCorpusWithException(String folderPath, String charsetName) throws IOException 105 | { 106 | if (folderPath == null) throw new IllegalArgumentException("参数 folderPath == null"); 107 | File root = new File(folderPath); 108 | if (!root.exists()) throw new IllegalArgumentException(String.format("目录 %s 不存在", root.getAbsolutePath())); 109 | if (!root.isDirectory()) 110 | throw new IllegalArgumentException(String.format("目录 %s 不是一个目录", root.getAbsolutePath())); 111 | 112 | Map dataSet = new TreeMap(); 113 | File[] folders = root.listFiles(); 114 | if (folders == null) return null; 115 | for (File folder : folders) 116 | { 117 | if (folder.isFile()) continue; 118 | File[] files = folder.listFiles(); 119 | if (files == null) continue; 120 | String[] documents = new String[files.length]; 121 | for (int i = 0; i < files.length; i++) 122 | { 123 | documents[i] = readTxt(files[i], charsetName); 124 | } 125 | 
dataSet.put(folder.getName(), documents); 126 | } 127 | 128 | return dataSet; 129 | } 130 | 131 | public static String readTxt(File file, String charsetName) throws IOException 132 | { 133 | FileInputStream is = new FileInputStream(file); 134 | byte[] targetArray = new byte[is.available()]; 135 | int len; 136 | int off = 0; 137 | while ((len = is.read(targetArray, off, targetArray.length - off)) != -1 && off < targetArray.length) 138 | { 139 | off += len; 140 | } 141 | is.close(); 142 | 143 | return new String(targetArray, charsetName); 144 | } 145 | 146 | public static Map loadCorpusWithException(String corpusPath) throws IOException 147 | { 148 | return loadCorpusWithException(corpusPath, "UTF-8"); 149 | } 150 | } -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/java/com/warmer/kgmaker/util/UploadUtil.java: -------------------------------------------------------------------------------- 1 | package com.warmer.kgmaker.util; 2 | 3 | import java.io.File; 4 | 5 | import org.apache.tomcat.util.http.fileupload.FileUploadException; 6 | import org.springframework.web.multipart.MultipartFile; 7 | 8 | public interface UploadUtil { 9 | String uploadFile(MultipartFile multipartFile) throws FileUploadException; 10 | 11 | String uploadFile(String filePath, MultipartFile multipartFile) throws FileUploadException; 12 | 13 | String uploadFile(MultipartFile multipartFile, String fileName) throws FileUploadException; 14 | 15 | String uploadFile(MultipartFile multipartFile, String fileName, String filePath) throws FileUploadException; 16 | 17 | String uploadFile(File file) throws FileUploadException; 18 | 19 | String uploadFile(String filePath, File file) throws FileUploadException; 20 | 21 | String uploadFile(File file, String fileName) throws FileUploadException; 22 | 23 | String uploadFile(File file, String fileName, String filePath) throws FileUploadException; 24 | 25 | String uploadFile(byte[] data) throws 
/**
 * Tiny UUID helper.
 */
public class UuidUtil {

	/**
	 * Generates a random (type 4) UUID and strips the dashes,
	 * yielding a 32-character lowercase hex string.
	 *
	 * @return the dash-free UUID string
	 */
	public static String getUUID() {
		return UUID.randomUUID().toString().replace("-", "");
	}
}
| name: file.miaoleyan.com 40 | prefixName: /nndt 41 | #--------七牛云配置 42 | img: #//如果是Windows情况下,格式是 D:\\blog\\image\\ linx 格式"/home/blog/image/"; 43 | location : D:\\blog\\image\\ 44 | file: 45 | serverurl: http://localhost:8089 46 | location : D:\\kgmanager\\csv\\ #如果是Windows情况下,格式是 D:\\kgmanager\\csv\\ linx 格式"/home/kgmanager/csv/"; 47 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/hanlp.properties: -------------------------------------------------------------------------------- 1 | #本配置文件中的路径的根目录,根目录+其他路径=完整路径(支持相对路径,请参考:https://github.com/hankcs/HanLP/pull/254) 2 | #Windows用户请注意,路径分隔符统一使用/ 3 | root=D:/HanLP/ 4 | #核心词典路径 5 | CoreDictionaryPath=data/dictionary/CoreNatureDictionary.txt 6 | #2元语法词典路径 7 | BiGramDictionaryPath=data/dictionary/CoreNatureDictionary.ngram.txt 8 | #停用词词典路径 9 | CoreStopWordDictionaryPath=data/dictionary/stopwords.txt 10 | #同义词词典路径 11 | CoreSynonymDictionaryDictionaryPath=data/dictionary/synonym/CoreSynonym.txt 12 | #人名词典路径 13 | PersonDictionaryPath=data/dictionary/person/nr.txt 14 | #人名词典转移矩阵路径 15 | PersonDictionaryTrPath=data/dictionary/person/nr.tr.txt 16 | #繁简词典根目录 17 | tcDictionaryRoot=data/dictionary/tc 18 | #自定义词典路径,用;隔开多个自定义词典,空格开头表示在同一个目录,使用“文件名 词性”形式则表示这个词典的词性默认是该词性。优先级递减。 19 | #另外data/dictionary/custom/CustomDictionary.txt是个高质量的词库,请不要删除。所有词典统一使用【UTF-8】编码。 20 | #注意,每次更新自己定义的新词典myDict.txt的内容时,要删除同目录下的词典缓存文件CustomDictionary.txt.bin 21 | CustomDictionaryPath=data/dictionary/custom/CustomDictionary.txt; 现代汉语补充词库.txt; 全国地名大全.txt ns; 人名词典.txt; 机构名词典.txt; 上海地名.txt ns;data/dictionary/person/nrf.txt nrf; 22 | 23 | #CRF分词模型路径 24 | CRFSegmentModelPath=data/model/segment/CRFSegmentModel.txt 25 | #HMM分词模型 26 | HMMSegmentModelPath=data/model/segment/HMMSegmentModel.bin 27 | #分词结果是否展示词性 28 | ShowTermNature=true 29 | #IO适配器,实现com.hankcs.hanlp.corpus.io.IIOAdapter接口以在不同的平台(Hadoop、Redis等)上运行HanLP 30 | #默认的IO适配器如下,该适配器是基于普通文件系统的。 31 | 
#IOAdapter=com.hankcs.hanlp.corpus.io.FileIOAdapter 32 | #感知机词法分析器 33 | PerceptronCWSModelPath=data/model/perceptron/pku199801/cws.bin 34 | PerceptronPOSModelPath=data/model/perceptron/pku199801/pos.bin 35 | PerceptronNERModelPath=data/model/perceptron/pku199801/ner.bin 36 | #CRF词法分析器 37 | CRFCWSModelPath=data/model/crf/pku199801/cws.bin 38 | CRFPOSModelPath=data/model/crf/pku199801/pos.bin 39 | CRFNERModelPath=data/model/crf/pku199801/ner.bin 40 | #更多配置项请参考 https://github.com/hankcs/HanLP/blob/master/src/main/java/com/hankcs/hanlp/HanLP.java#L59 自行添加 -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/mapping/Knowledgegraph.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 8 | 18 | 21 | 24 | 25 | 26 | INSERT INTO knowledgegraphdomain(name, createuser,nodecount, shipcount,status) VALUES (#{params.name},#{params.createuser},#{params.nodecount},#{params.shipcount},1) 27 | 28 | 29 | UPDATE knowledgegraphdomain 30 | SET 31 | name =#{params.name} , 32 | createuser=#{params.createuser} , 33 | nodecount=#{params.nodecount}, 34 | shipcount=#{params.shipcount}, 35 | WHERE id = #{params.id} 36 | 37 | 38 | UPDATE knowledgegraphdomain SET status = 0 WHERE id = #{id} 39 | 40 | 41 | 42 | insert knowledgenodedetailfile 43 | (Domainid,NodeId,FileName,ImageType,CreateUser,CreateTime,Status) 44 | values 45 | 46 | (#{item.domainid},#{item.nodeid} ,#{item.file} ,#{item.imagetype},#{item.createuser},#{item.createtime},#{item.status}) 47 | 48 | 49 | 50 | insert knowledgenodedetail 51 | (Domainid,NodeId,Content,CreateUser,CreateTime,Status) 52 | values 53 | (#{params.domainid},#{params.nodeid} ,#{params.content} ,#{params.createuser},#{params.createtime},#{params.status}) 54 | 55 | 56 | UPDATE knowledgenodedetail SET Content =#{params.content} ,ModifyTime=#{params.modifytime} WHERE Domainid=#{params.domainid} and NodeId = #{params.nodeid} 57 | 58 | 61 | 64 | 65 | update 
knowledgenodedetailfile set Status=0 where Domainid=#{domainid} and NodeId=#{nodeid} 66 | 67 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/css/blog/m.css: -------------------------------------------------------------------------------- 1 | @charset "gb2312"; 2 | @media screen and (min-width: 1024px) and (max-width: 1199px) { 3 | header { width: 100%; margin: auto; } 4 | .nav { width: 90% } 5 | .nav li a { padding: 0 15px } 6 | .search_bar { display: none } 7 | article { width: 90%; margin: 100px auto 0 } 8 | .fader { padding-top: 0; height: 360px } 9 | .imginfo { bottom: 15% } 10 | .toppic li i img { height: 170px; } 11 | .gd { width: 230px } 12 | .container { width: 90%; margin: 20px auto; } 13 | .share ul li i { } 14 | } 15 | @media screen and (min-width: 960px) and (max-width: 1023px) { 16 | header { width: 100%; margin: auto; } 17 | .nav { width: 90% } 18 | .nav li a { padding: 0 15px } 19 | .search_bar { display: none } 20 | article { width: 90%; margin: 100px auto 0 } 21 | .fader { padding-top: 0; height: 337px } 22 | .imginfo { bottom: 15% } 23 | .toppic li i img { height: 160px; } 24 | .gd { width: 220px } 25 | .zhuanti p { top: 20% } 26 | .container { width: 90%; margin: 20px auto; } 27 | .share ul li i { } 28 | .timebox li:hover a { padding: 0 30px } 29 | } 30 | @media screen and (min-width: 768px) and (max-width: 959px) { 31 | header, .blogsbox { width: 100%; margin: auto; } 32 | .menu { width: 90%; display: none; } 33 | #mnav { display: block; z-index: 99999; } 34 | .search_bar { display: none } 35 | article { width: 90%; margin: 70px auto 0 } 36 | .imginfo { bottom: 17% } 37 | .fader { padding-top: 0; height: 279px } 38 | .toppic li i img { height: 130px; } 39 | .sidebar { display: none } 40 | .imginfo { font-size: 22px } 41 | .news_infos { width: 100% } 42 | .container { width: 90%; margin: 20px auto; } 43 | .shareli { margin: 0 5px; } 44 | .share ul li i { height: 150px } 
45 | .share ul li h2 { padding: 15px 5px; } 46 | h1.t_nav span { display: none } 47 | .timebox li { font-size: 15px } 48 | .timebox li span { margin-right: 30px } 49 | .timebox:before { left: 100px; } 50 | .timebox li a:after { left: -26px } 51 | .timebox li:hover a { padding: 0 30px } 52 | .infosbox { width: 100% } 53 | .pagebg {margin-top: 50px;} 54 | } 55 | @media only screen and (min-width: 480px) and (max-width: 767px) { 56 | header, .blogsbox, .banner { width: 100%; margin: auto; } 57 | .menu { width: 90%; display: none; } 58 | #mnav { display: block; z-index: 99999; } 59 | .search_bar, .toppic { display: none } 60 | article { width: 90%; margin: 70px auto 0 } 61 | .bloginfo { width: 100%; clear: both } 62 | .blogs .blogpic { width: 30%; height: auto } 63 | .fader { padding-top: 50%; width: 100% } 64 | .imginfo { font-size: 20px; bottom: 20%; } 65 | .bloginfo ul li { margin-right: 10px } 66 | .bplist li img { height: 100px } 67 | .bplist li { height: 100px } 68 | .bigpic img { height: auto } 69 | .blogs .blogtext { margin-top: 10px; } 70 | .bplist li:nth-child(2) { margin-left: 3px; } 71 | .sidebar { display: none } 72 | .news_infos { width: 100% } 73 | h1.t_nav span { display: none } 74 | .container { width: 90%; margin: 20px auto; } 75 | .share ul li { width: 50% } 76 | .share ul li i { height: 150px } 77 | .shareli { margin: 0 15px; } 78 | .share ul li h2 { padding: 15px 15px; } 79 | .timebox li { font-size: 15px } 80 | .timebox li span { display: none } 81 | .timebox:before { display: none } 82 | .timebox li:hover a { padding: 0 30px } 83 | .infosbox { width: 100% } 84 | .pagebg {margin-top: 50px;} 85 | .diggit{ float:none; margin-left: auto;} 86 | .dasbox{ display:none} 87 | } 88 | @media only screen and (max-width: 479px) { 89 | header, .blogsbox, .banner { width: 100%; margin: auto; } 90 | .menu { width: 90%; display: none; } 91 | #mnav { display: block; z-index: 99999; } 92 | .search_bar, .toppic { display: none } 93 | article { width: 90%; margin: 
70px auto 0 } 94 | .imginfo { font-size: 18px; bottom: 22%; } 95 | .bloginfo { width: 100%; clear: both; } 96 | .blogs .blogpic { width: 100%; height: auto; margin-bottom: 10px } 97 | .fader { padding-top: 60%; width: 100% } 98 | .bloginfo ul li { margin-right: 10px } 99 | .bplist li img { height: 100px } 100 | .bplist li { height: 100px } 101 | .bigpic img { height: auto } 102 | .blogs .blogtext { width: 100%; } 103 | .bloginfo ul li { background-image: none } 104 | .bloginfo .author, .bloginfo .lmname { display: none } 105 | .bplist li:nth-child(2) { margin: 0 0 0 2px } 106 | .sidebar { display: none } 107 | .news_infos { width: 100% } 108 | h1.t_nav span { display: none } 109 | .container { width: 90%; margin: 20px auto; } 110 | .share ul li { width: 50% } 111 | .share ul li i { height: 150px } 112 | .shareli { margin: 0 3%; } 113 | .share ul li h2 { padding: 15px 15px; } 114 | footer p { padding: 0 30px } 115 | .timebox li { font-size: 15px } 116 | .timebox li span { display: none } 117 | .timebox:before { display: none } 118 | .timebox li:hover a { padding: 0 30px } 119 | .infosbox { width: 100% } 120 | .pagebg {margin-top: 50px;} 121 | .diggit{ float:none; margin-left: auto;} 122 | .dasbox{ display:none} 123 | } 124 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/gzbg.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/gzbg.jpg -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/logo/login_bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/logo/login_bg.png -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo-0.png -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo-3.jpg -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo.png -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo_o.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/logo/logo_o.png -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/images/tan_weixin_qr_1.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/images/tan_weixin_qr_1.jpg -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/js/sidebarAdmin.js: -------------------------------------------------------------------------------- 1 | 2 | $(document).ready(function () { 3 | // 菜单切换 4 | $('.navbar-minimalize').click(function () { 5 | //debugger; 6 | $("body").toggleClass("mini-navbar"); 7 | var has=$("body").hasClass("mini-navbar"); 8 | if(has){ 9 | $('#sideIcon').removeClass('glyphicon-chevron-left').addClass('glyphicon-chevron-right'); 10 | }else{ 11 | $('#sideIcon').removeClass('glyphicon-chevron-right').addClass('glyphicon-chevron-left'); 12 | } 13 | SmoothlyMenu(); 14 | }); 15 | 16 | 17 | // 侧边栏高度 18 | function fix_height() { 19 | var heightWithoutNavbar = $("body > #wrapper").height() - 61; 20 | $(".sidebard-panel").css("min-height", heightWithoutNavbar + "px"); 21 | } 22 | fix_height(); 23 | 24 | $(window).bind("load resize click scroll", function () { 25 | if (!$("body").hasClass('body-small')) { 26 | fix_height(); 27 | } 28 | }); 29 | 30 | //侧边栏滚动 31 | $(window).scroll(function () { 32 | if ($(window).scrollTop() > 0 && !$('body').hasClass('fixed-nav')) { 33 | $('#right-sidebar').addClass('sidebar-top'); 34 | } else { 35 | $('#right-sidebar').removeClass('sidebar-top'); 36 | } 37 | }); 38 | 39 | $('.full-height-scroll').slimScroll({ 40 | height: '100%' 41 | }); 42 | 43 | $('#side-menu>li').click(function () { 44 | if ($('body').hasClass('mini-navbar')) { 45 | NavToggle(); 46 | } 47 | }); 48 | $('#side-menu>li li a').click(function () { 49 | if ($(window).width() < 769) { 50 | NavToggle(); 51 | } 52 | }); 53 | 54 | $('.nav-close').click(NavToggle); 55 | 56 | //ios浏览器兼容性处理 57 | if 
(/(iPhone|iPad|iPod|iOS)/i.test(navigator.userAgent)) { 58 | $('#content-main').css('overflow-y', 'auto'); 59 | } 60 | 61 | }); 62 | 63 | $(window).bind("load resize", function () { 64 | if ($(this).width() < 769) { 65 | $('body').addClass('mini-navbar'); 66 | $('.navbar-static-side').fadeIn(); 67 | } 68 | }); 69 | 70 | function NavToggle() { 71 | $('.navbar-minimalize').trigger('click'); 72 | } 73 | 74 | function SmoothlyMenu() { 75 | if (!$('body').hasClass('mini-navbar')) { 76 | $('#side-menu').hide(); 77 | setTimeout( 78 | function () { 79 | $('#side-menu').fadeIn(500); 80 | }, 100); 81 | } else if ($('body').hasClass('fixed-sidebar')) { 82 | $('#side-menu').hide(); 83 | setTimeout( 84 | function () { 85 | $('#side-menu').fadeIn(500); 86 | }, 300); 87 | } else { 88 | $('#side-menu').removeAttr('style'); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/scripts/cy2neod3.js: -------------------------------------------------------------------------------- 1 | function Cy2NeoD3(config, graphId, tableId, sourceId, execId, urlSource, renderGraph, cbResult) { 2 | function createEditor() { 3 | return CodeMirror.fromTextArea(document.getElementById(sourceId), { 4 | parserfile: ["codemirror-cypher.js"], 5 | path: "/scripts", 6 | stylesheet: "/styles/codemirror-neo.css", 7 | autoMatchParens: true, 8 | lineNumbers: true, 9 | enterMode: "keep", 10 | value: "some value" 11 | }); 12 | } 13 | var neod3 = new Neod3Renderer(); 14 | var neo = new Neo(urlSource); 15 | var editor = createEditor(); 16 | $("#"+execId).click(function(evt) { 17 | try { 18 | evt.preventDefault(); 19 | var query = editor.getValue(); 20 | console.log("Executing Query",query); 21 | var execButton = $(this).find('i'); 22 | execButton.toggleClass('fa-play-circle-o fa-spinner fa-spin') 23 | neo.executeQuery(query,{},function(err,res) { 24 | execButton.toggleClass('fa-spinner fa-spin fa-play-circle-o') 25 | res = res 
|| {} 26 | var graph=res.graph; 27 | if (renderGraph) { 28 | if (graph) { 29 | var c=$("#"+graphId); 30 | c.empty(); 31 | neod3.render(graphId, c ,graph); 32 | renderResult(tableId, res.table); 33 | } else { 34 | if (err) { 35 | console.log(err); 36 | if (err.length > 0) { 37 | sweetAlert("Cypher error", err[0].code + "\n" + err[0].message, "error"); 38 | } else { 39 | sweetAlert("Ajax " + err.statusText, "Status " + err.status + ": " + err.state(), "error"); 40 | } 41 | } 42 | } 43 | } 44 | if(cbResult) cbResult(res); 45 | }); 46 | } catch(e) { 47 | console.log(e); 48 | sweetAlert("Catched error", e, "error"); 49 | } 50 | return false; 51 | }); 52 | } 53 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/scripts/cypher.datatable.js: -------------------------------------------------------------------------------- 1 | function convertResult(data) { 2 | var charWidth = 14; 3 | var result = { columns: [], data: []}; 4 | var columns = Object.keys(data[0]); 5 | var count = columns.length; 6 | var rows = data.length; 7 | for (var col = 0; col < count; col++) { 8 | result.columns[col] = {"sTitle": columns[col], sWidth: columns[col].length * charWidth}; 9 | } 10 | for (var row = 0; row < rows; row++) { 11 | var currentRow = data[row]; 12 | var newRow = []; 13 | for (var col = 0; col < count; col++) { 14 | var value = convertCell(currentRow[columns[col]]); 15 | newRow[col] = value; 16 | result.columns[col].sWidth = Math.max(value.length * charWidth, result.columns[col].sWidth); 17 | } 18 | result.data[row] = newRow; 19 | } 20 | var width = 0; 21 | for (var col = 0; col < count; col++) { 22 | width += result.columns[col].sWidth; 23 | } 24 | var windowWith = $(window).width() / 2; 25 | for (var col = 0; col < count; col++) { 26 | // result.columns[col].sWidth=windowWith * result.columns[col].sWidth / width; 27 | result.columns[col].sWidth = "" + Math.round(100 * result.columns[col].sWidth / width) + 
"%"; 28 | // console.log(result.columns[col].sWidth); 29 | } 30 | return result; 31 | } 32 | 33 | function convertCell(cell) { 34 | if (cell == null) return ""; 35 | if (cell instanceof Array) { 36 | var result = []; 37 | for (var i = 0; i < cell.length; i++) { 38 | result.push(convertCell(cell[i])); 39 | } 40 | return "[" + result.join(", ") + "]"; 41 | } 42 | if (cell instanceof Object) { 43 | if (cell["_type"]) { 44 | return "(" + cell["_start"] + ")-[" + cell["_id"] + ":" + cell["_type"] + props(cell) + "]->(" + cell["_end"] + ")"; 45 | } else 46 | if (typeof(cell["_id"]) !== "undefined") { 47 | var labels = ""; 48 | if (cell["_labels"]) { 49 | labels = ":" + cell["_labels"].join(":"); 50 | } 51 | return "(" + cell["_id"] + labels + props(cell) + ")"; 52 | } 53 | return props(cell); 54 | } 55 | return cell; 56 | } 57 | 58 | function props(cell) { 59 | var props = []; 60 | for (key in cell) { 61 | if (cell.hasOwnProperty(key) && key[0] != '_') { 62 | props.push([key] + ":" + JSON.stringify(cell[key])); 63 | } 64 | } 65 | return props.length ? " {" + props.join(", ") + "}" : ""; 66 | } 67 | 68 | function renderResult(id, data) { 69 | if (!data || !data.length) return; 70 | var result = convertResult(data); 71 | var table = $('
').appendTo($("#" + id)); 72 | var large = result.data.length > 10; 73 | var dataTable = table.dataTable({ 74 | aoColumns: result.columns, 75 | bFilter: large, 76 | bInfo: large, 77 | bLengthChange: large, 78 | bPaginate: large, 79 | aaData: result.data, 80 | // bAutoWidth: true, 81 | aLengthMenu: [ 82 | [10, 25, 50, -1], 83 | [10, 25, 50, "All"] 84 | ], 85 | aaSorting: [], 86 | bSortable: true, 87 | oLanguage: { 88 | oPaginate: { 89 | sNext: " >> ", 90 | sPrevious: " << " 91 | } 92 | } 93 | }); 94 | } 95 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/scripts/neo4d3.js: -------------------------------------------------------------------------------- 1 | function Neo(urlSource) { 2 | function txUrl() { 3 | var connection = urlSource(); 4 | var url = (connection.url || "http://localhost:7474").replace(/\/db\/data.*/,""); 5 | return url + "/db/data/transaction/commit"; 6 | } 7 | var me = { 8 | executeQuery: function(query, params, cb) { 9 | var connection = urlSource(); 10 | var auth = ((connection.user || "") == "") ? 
"" : "Basic " + btoa(connection.user + ":" + connection.pass); 11 | $.ajax(txUrl(), { 12 | type: "POST", 13 | data: JSON.stringify({ 14 | statements: [{ 15 | statement: query, 16 | parameters: params || {}, 17 | resultDataContents: ["row", "graph"] 18 | }] 19 | }), 20 | contentType: "application/json", 21 | error: function(err) { 22 | cb(err); 23 | }, 24 | beforeSend: function (xhr) { 25 | if (auth && auth.length) xhr.setRequestHeader ("Authorization", auth); 26 | }, 27 | success: function(res) { 28 | if (res.errors.length > 0) { 29 | cb(res.errors); 30 | } else { 31 | var cols = res.results[0].columns; 32 | var rows = res.results[0].data.map(function(row) { 33 | var r = {}; 34 | cols.forEach(function(col, index) { 35 | r[col] = row.row[index]; 36 | }); 37 | return r; 38 | }); 39 | var nodes = []; 40 | var rels = []; 41 | var labels = []; 42 | function findNode(nodes, id) { 43 | for (var i=0;i 0; 51 | if (!found) { 52 | //n.props=n.properties; 53 | for(var p in n.properties||{}) { n[p]=n.properties[p];delete n.properties[p];} 54 | delete n.properties; 55 | nodes.push(n); 56 | labels=labels.concat(n.labels.filter(function(l) { labels.indexOf(l) == -1 })) 57 | } 58 | }); 59 | rels = rels.concat(row.graph.relationships.map( 60 | function(r) { 61 | return { id: r.id, start:r.startNode, end:r.endNode, type:r.type } } 62 | )); 63 | }); 64 | cb(null,{table:rows,graph:{nodes:nodes, links:rels},labels:labels}); 65 | } 66 | } 67 | }); 68 | } 69 | }; 70 | return me; 71 | } 72 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/codemirror-neo.css: -------------------------------------------------------------------------------- 1 | .code-style,#editor .cm-s-neo{font-family:Menlo,"Courier New",Terminal,monospace;font-size:16px;line-height:23px;-webkit-font-smoothing:initial} 2 | .cm-s-neo,.cm-s-neo.cm-s-css{color:#2e383c;} 3 | .cm-s-neo .cm-comment,.cm-s-neo.cm-s-css .cm-comment{color:#75787b} 4 | 
.cm-s-neo .cm-keyword,.cm-s-neo.cm-s-css .cm-keyword,.cm-s-neo .cm-property,.cm-s-neo.cm-s-css .cm-property{color:#1d75b3} 5 | .cm-s-neo .cm-atom,.cm-s-neo.cm-s-css .cm-atom,.cm-s-neo .cm-number,.cm-s-neo.cm-s-css .cm-number{color:#75438a} 6 | .cm-s-neo .cm-node,.cm-s-neo.cm-s-css .cm-node,.cm-s-neo .cm-tag,.cm-s-neo.cm-s-css .cm-tag{color:#9c3328} 7 | .cm-s-neo .cm-string,.cm-s-neo.cm-s-css .cm-string{color:#b35e14} 8 | .cm-s-neo .cm-variable,.cm-s-neo.cm-s-css .cm-variable,.cm-s-neo .cm-qualifier,.cm-s-neo.cm-s-css .cm-qualifier{color:#047d65} 9 | #grass .cm-s-neo{font-size:14px;line-height:18px} 10 | #editor .cm-s-neo{background-color:transparent;margin:25px 180px 25px 16px;-webkit-transition:all 0.4s;-moz-transition:all 0.4s;-o-transition:all 0.4s;-ms-transition:all 0.4s;transition:all 0.4s;-webkit-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-moz-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-o-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-ms-transition-timing-function:cubic-bezier(.694,.0482,.335,1);transition-timing-function:cubic-bezier(.694,.0482,.335,1);} 11 | #editor .cm-s-neo pre{padding:0} 12 | #editor .cm-s-neo .cm-s-neo-placeholder{color:#e0e2e6} 13 | #editor .cm-s-neo-lines{padding:0} 14 | #editor .cm-s-neo-gutters{border:none;border-right:10px solid transparent;background-color:transparent} 15 | #editor .cm-s-neo-linenumber{padding:0;color:#e0e2e5;opacity:1;-ms-filter:none;filter:none} 16 | #editor .cm-s-neo{height:auto} 17 | #editor .cm-s-neo-scroll{overflow:hidden;max-height:140px} 18 | #editor .cm-s-neo div.cm-s-neo-cursor{border-left:11px solid rgba(155,157,162,0.37);z-index:3} 19 | #editor .cm-s-neo-sizer{-webkit-transition:min-height 0.4s;-moz-transition:min-height 0.4s;-o-transition:min-height 0.4s;-ms-transition:min-height 0.4s;transition:min-height 
0.4s;-webkit-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-moz-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-o-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-ms-transition-timing-function:cubic-bezier(.694,.0482,.335,1);transition-timing-function:cubic-bezier(.694,.0482,.335,1)} 20 | #editor .cm-s-neo-scroll div:nth-child(2){-webkit-transition:top 0.4s;-moz-transition:top 0.4s;-o-transition:top 0.4s;-ms-transition:top 0.4s;transition:top 0.4s;-webkit-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-moz-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-o-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-ms-transition-timing-function:cubic-bezier(.694,.0482,.335,1);transition-timing-function:cubic-bezier(.694,.0482,.335,1)} 21 | #editor .prompt{position:absolute;top:24px;left:25px;color:#93969b;opacity:0;-ms-filter:"progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";filter:alpha(opacity=0)} 22 | #editor .one-line .prompt{opacity:1;-ms-filter:none;filter:none} 23 | #editor .one-line .cm-s-neo .cm-s-neo-linenumber{opacity:0;-ms-filter:"progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";filter:alpha(opacity=0)} 24 | #editor .disable-highlighting .cm-s-neo .cm-s-neo-code *{color:#7b7f89} 25 | 26 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/cy2neo.css: -------------------------------------------------------------------------------- 1 | svg { 2 | /* background: #333;*/ 3 | width:100%; 4 | height:100%; 5 | } 6 | .CodeMirror, .CodeMirror-scroll { 7 | height:auto; 8 | } 9 | 10 | #execute { 11 | font-size:30px; 12 | position:absolute; 13 | top: 0.6em; 14 | right: 10em; 15 | } 16 | 17 | #neo4jUrl { 18 | position:absolute; 19 | right: 5px; 20 | top: 1em; 21 | z-index: 100; 22 | width: 20em; 23 | } 24 | 25 | #neo4jUser { 26 | position:absolute; 27 | right: 9em; 28 | top: 4em; 29 | z-index: 100; 30 | width: 8em; 31 | } 32 
| 33 | #neo4jPass { 34 | position:absolute; 35 | right: 5px; 36 | top: 4em; 37 | z-index: 100; 38 | width: 8em; 39 | } -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/datatable.css: -------------------------------------------------------------------------------- 1 | table { 2 | border-collapse: collapse; 3 | /* 4 | clear: both; 5 | */ 6 | color: black; 7 | border-left: 2px dashed white; 8 | border-bottom: 2px dashed white; 9 | border-top: 2px dashed white; 10 | min-width: 50%; 11 | padding: 5px; 12 | } 13 | thead { 14 | border-bottom: 2px dashed white; 15 | } 16 | 17 | th, td { 18 | border-right: 2px dashed white; 19 | padding: 5px; 20 | } 21 | 22 | table.dataTable th { 23 | border-bottom-width: 2px; 24 | font-weight: normal; 25 | background-color: #f8f8f8; 26 | } 27 | 28 | .even, thead { 29 | background: #eeeeee; 30 | } 31 | 32 | .odd { 33 | background: #dddddd; 34 | } 35 | 36 | .dataTables_wrapper { 37 | display: inline-block; 38 | max-width: 95%; 39 | min-width: 50%; 40 | z-index: 100; 41 | margin: 2em; 42 | } 43 | 44 | .dataTables_length { 45 | float: left; 46 | } 47 | .dataTables_filter { 48 | text-align: right; 49 | } 50 | .dataTables_info { 51 | float: left; 52 | margin: 3px; 53 | } 54 | 55 | .dataTables_paginate { 56 | margin: 3px; 57 | text-align: right; 58 | } 59 | .paging_two_button { 60 | 61 | } 62 | .paginate_disabled_previous, .paginate_disabled_next { 63 | display: none; 64 | padding: 3px; 65 | } 66 | .paginate_enabled_previous, .paginate_enabled_next { 67 | padding: 3px; 68 | color: #090 69 | } 70 | 71 | .css_right { 72 | float: right; 73 | } 74 | .sorting_asc { 75 | background: url('../img/sort_asc.png') no-repeat center right; 76 | } 77 | 78 | .sorting_desc { 79 | background: url('../img/sort_desc.png') no-repeat center right; 80 | } 81 | 82 | .sorting { 83 | background: url('../img/sort_both.png') no-repeat center right; 84 | } 85 | 86 | .sorting_asc_disabled { 87 
| background: url('../img/sort_asc_disabled.png') no-repeat center right; 88 | } 89 | 90 | .sorting_desc_disabled { 91 | background: url('../img/sort_desc_disabled.png') no-repeat center right; 92 | } 93 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/FontAwesome.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/FontAwesome.otf -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/styles/fonts/fontawesome-webfont.woff 
-------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/gh-fork-ribbon.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * "Fork me on GitHub" CSS ribbon v0.1.1 | MIT License 3 | * https://github.com/simonwhitaker/github-fork-ribbon-css 4 | */ 5 | 6 | /* Left will inherit from right (so we don't need to duplicate code) */ 7 | .github-fork-ribbon { 8 | /* The right and left classes determine the side we attach our banner to */ 9 | position: absolute; 10 | 11 | /* Add a bit of padding to give some substance outside the "stitching" */ 12 | padding: 2px 0; 13 | 14 | /* Set the base colour */ 15 | background-color: #a00; 16 | 17 | /* Set a gradient: transparent black at the top to almost-transparent black at the bottom */ 18 | background-image: -webkit-gradient(linear, left top, left bottom, from(rgba(0, 0, 0, 0)), to(rgba(0, 0, 0, 0.15))); 19 | background-image: -webkit-linear-gradient(top, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.15)); 20 | background-image: -moz-linear-gradient(top, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.15)); 21 | background-image: -ms-linear-gradient(top, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.15)); 22 | background-image: -o-linear-gradient(top, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.15)); 23 | background-image: linear-gradient(to bottom, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.15)); 24 | 25 | /* Add a drop shadow */ 26 | -webkit-box-shadow: 0 2px 3px 0 rgba(0, 0, 0, 0.5); 27 | -moz-box-shadow: 0 2px 3px 0 rgba(0, 0, 0, 0.5); 28 | box-shadow: 0 2px 3px 0 rgba(0, 0, 0, 0.5); 29 | 30 | /* Set the font */ 31 | font: 700 13px "Helvetica Neue", Helvetica, Arial, sans-serif; 32 | 33 | z-index: 9999; 34 | pointer-events: auto; 35 | } 36 | 37 | .github-fork-ribbon a, 38 | .github-fork-ribbon a:hover { 39 | /* Set the text properties */ 40 | color: #fff; 41 | text-decoration: none; 42 | text-shadow: 0 -1px rgba(0, 0, 0, 0.5); 43 | text-align: center; 44 | 45 | /* 
Set the geometry. If you fiddle with these you'll also need 46 | to tweak the top and right values in .github-fork-ribbon. */ 47 | width: 200px; 48 | line-height: 20px; 49 | 50 | /* Set the layout properties */ 51 | display: inline-block; 52 | padding: 2px 0; 53 | 54 | /* Add "stitching" effect */ 55 | border-width: 1px 0; 56 | border-style: dotted; 57 | border-color: #fff; 58 | border-color: rgba(255, 255, 255, 0.7); 59 | } 60 | 61 | .github-fork-ribbon-wrapper { 62 | width: 150px; 63 | height: 150px; 64 | position: absolute; 65 | overflow: hidden; 66 | top: 0; 67 | z-index: 9999; 68 | pointer-events: none; 69 | } 70 | 71 | .github-fork-ribbon-wrapper.fixed { 72 | position: fixed; 73 | } 74 | 75 | .github-fork-ribbon-wrapper.left { 76 | left: 0; 77 | } 78 | 79 | .github-fork-ribbon-wrapper.right { 80 | right: 0; 81 | } 82 | 83 | .github-fork-ribbon-wrapper.left-bottom { 84 | position: fixed; 85 | top: inherit; 86 | bottom: 0; 87 | left: 0; 88 | } 89 | 90 | .github-fork-ribbon-wrapper.right-bottom { 91 | position: fixed; 92 | top: inherit; 93 | bottom: 0; 94 | right: 0; 95 | } 96 | 97 | .github-fork-ribbon-wrapper.right .github-fork-ribbon { 98 | top: 42px; 99 | right: -43px; 100 | 101 | -webkit-transform: rotate(45deg); 102 | -moz-transform: rotate(45deg); 103 | -ms-transform: rotate(45deg); 104 | -o-transform: rotate(45deg); 105 | transform: rotate(45deg); 106 | } 107 | 108 | .github-fork-ribbon-wrapper.left .github-fork-ribbon { 109 | top: 42px; 110 | left: -43px; 111 | 112 | -webkit-transform: rotate(-45deg); 113 | -moz-transform: rotate(-45deg); 114 | -ms-transform: rotate(-45deg); 115 | -o-transform: rotate(-45deg); 116 | transform: rotate(-45deg); 117 | } 118 | 119 | 120 | .github-fork-ribbon-wrapper.left-bottom .github-fork-ribbon { 121 | top: 80px; 122 | left: -43px; 123 | 124 | -webkit-transform: rotate(45deg); 125 | -moz-transform: rotate(45deg); 126 | -ms-transform: rotate(45deg); 127 | -o-transform: rotate(45deg); 128 | transform: rotate(45deg); 129 | 
} 130 | 131 | .github-fork-ribbon-wrapper.right-bottom .github-fork-ribbon { 132 | top: 80px; 133 | right: -43px; 134 | 135 | -webkit-transform: rotate(-45deg); 136 | -moz-transform: rotate(-45deg); 137 | -ms-transform: rotate(-45deg); 138 | -o-transform: rotate(-45deg); 139 | transform: rotate(-45deg); 140 | } 141 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/images/maze-black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/styles/images/maze-black.png -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/styles/neod3.css: -------------------------------------------------------------------------------- 1 | path.link{fill:none;stroke:#7f8c8d;stroke-width:2px} 2 | text{font-family:sans-serif;pointer-events:none} 3 | marker{fill:#d8dadc} 4 | text .faded{fill:#333} 5 | .node .outline{-webkit-transform:scale(1);-moz-transform:scale(1);-o-transform:scale(1);-ms-transform:scale(1);transform:scale(1);-webkit-transition:all .15s;-moz-transition:all .15s;-o-transition:all .15s;-ms-transition:all .15s;transition:all .15s;-webkit-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-moz-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-o-transition-timing-function:cubic-bezier(.694,.0482,.335,1);-ms-transition-timing-function:cubic-bezier(.694,.0482,.335,1);transition-timing-function:cubic-bezier(.694,.0482,.335,1)} 6 | .node:hover .outline{-webkit-transform:scale(1.4);-moz-transform:scale(1.4);-o-transform:scale(1.4);-ms-transform:scale(1.4);transform:scale(1.4)} 7 | .relationship:hover{stroke:#3498db} 8 | .outline,.overlay{cursor:pointer} 9 | g.node text[stroke="#DFE1E3"] {text-shadow: -1px -1px 2px #DFE1E3, -1px 
1px 2px #DFE1E3, -1px 0 2px #DFE1E3, 1px -1px 2px #DFE1E3, 1px 1px 2px #DFE1E3, 1px 0 2px #DFE1E3, 0 -1px 2px #DFE1E3, 0 1px 2px #DFE1E3} 10 | g.node text[stroke="#F25A29"] {text-shadow: -1px -1px 2px #F25A29, -1px 1px 2px #F25A29, -1px 0 2px #F25A29, 1px -1px 2px #F25A29, 1px 1px 2px #F25A29, 1px 0 2px #F25A29, 0 -1px 2px #F25A29, 0 1px 2px #F25A29} 11 | g.node text[stroke="#AD62CE"] {text-shadow: -1px -1px 2px #AD62CE, -1px 1px 2px #AD62CE, -1px 0 2px #AD62CE, 1px -1px 2px #AD62CE, 1px 1px 2px #AD62CE, 1px 0 2px #AD62CE, 0 -1px 2px #AD62CE, 0 1px 2px #AD62CE} 12 | g.node text[stroke="#30B6AF"] {text-shadow: -1px -1px 2px #30B6AF, -1px 1px 2px #30B6AF, -1px 0 2px #30B6AF, 1px -1px 2px #30B6AF, 1px 1px 2px #30B6AF, 1px 0 2px #30B6AF, 0 -1px 2px #30B6AF, 0 1px 2px #30B6AF} 13 | g.node text[stroke="#FCC940"] {text-shadow: -1px -1px 2px #FCC940, -1px 1px 2px #FCC940, -1px 0 2px #FCC940, 1px -1px 2px #FCC940, 1px 1px 2px #FCC940, 1px 0 2px #FCC940, 0 -1px 2px #FCC940, 0 1px 2px #FCC940} 14 | g.node text[stroke="#4356C0"] {text-shadow: -1px -1px 2px #4356C0, -1px 1px 2px #4356C0, -1px 0 2px #4356C0, 1px -1px 2px #4356C0, 1px 1px 2px #4356C0, 1px 0 2px #4356C0, 0 -1px 2px #4356C0, 0 1px 2px #4356C0} 15 | g.node text[stroke="#FF6C7C"] {text-shadow: -1px -1px 2px #FF6C7C, -1px 1px 2px #FF6C7C, -1px 0 2px #FF6C7C, 1px -1px 2px #FF6C7C, 1px 1px 2px #FF6C7C, 1px 0 2px #FF6C7C, 0 -1px 2px #FF6C7C, 0 1px 2px #FF6C7C} 16 | g.node text[stroke="#a2cf81"] {text-shadow: -1px -1px 2px #a2cf81, -1px 1px 2px #a2cf81, -1px 0 2px #a2cf81, 1px -1px 2px #a2cf81, 1px 1px 2px #a2cf81, 1px 0 2px #a2cf81, 0 -1px 2px #a2cf81, 0 1px 2px #a2cf81} 17 | g.node text[stroke="#f79235"] {text-shadow: -1px -1px 2px #f79235, -1px 1px 2px #f79235, -1px 0 2px #f79235, 1px -1px 2px #f79235, 1px 1px 2px #f79235, 1px 0 2px #f79235, 0 -1px 2px #f79235, 0 1px 2px #f79235} 18 | g.node text[stroke="#785cc7"] {text-shadow: -1px -1px 2px #785cc7, -1px 1px 2px #785cc7, -1px 0 2px #785cc7, 1px -1px 2px #785cc7, 1px 
1px 2px #785cc7, 1px 0 2px #785cc7, 0 -1px 2px #785cc7, 0 1px 2px #785cc7} 19 | g.node text[stroke="#d05e7c"] {text-shadow: -1px -1px 2px #d05e7c, -1px 1px 2px #d05e7c, -1px 0 2px #d05e7c, 1px -1px 2px #d05e7c, 1px 1px 2px #d05e7c, 1px 0 2px #d05e7c, 0 -1px 2px #d05e7c, 0 1px 2px #d05e7c} 20 | g.node text[stroke="#3986b7"] {text-shadow: -1px -1px 2px #3986b7, -1px 1px 2px #3986b7, -1px 0 2px #3986b7, 1px -1px 2px #3986b7, 1px 1px 2px #3986b7, 1px 0 2px #3986b7, 0 -1px 2px #3986b7, 0 1px 2px #3986b7} 21 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/static/wangeditor/fonts/w-e-icon.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OS-ABC/AIOps-Knowledge-Graph-WebData/5def69bbfe720f03a749b92d5226f9e8867989f0/基于图谱的应用/kgmaker/src/main/resources/static/wangeditor/fonts/w-e-icon.woff -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/kg/popse.html: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 知识图谱可视化 6 | 7 | 10 | 11 | 12 | 13 |
14 | 15 | 分词 16 | 17 | 18 | 21 | 22 | 23 | 26 | 27 | 28 | 31 | 32 |
19 | {{m.nature}} 20 |
24 | {{m.word}} 25 |
29 | {{m.pos}} 30 |
33 |
34 | 35 | 36 |
37 | 38 | 75 |
76 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/kg/test.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | demo 7 | 8 | 31 | 32 | 33 | 34 |
35 |
36 |
37 | 38 |
39 |
40 | 41 |
42 |
43 | 44 |
45 |
46 |
47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/focus.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 |
6 |
7 |

关注我 么么哒!

8 | 13 |
14 |
15 | 16 | 17 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/footer.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/header.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |
5 | 23 | 24 | 25 | 26 |
27 | 28 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/header.html___jb_tmp___: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |
5 | 23 |
24 | 25 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/headerAdmin.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 |
6 | 7 |
8 | 59 |
60 |
61 | 62 | 63 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/layout.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 首页 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
30 | 31 | 32 |
33 | 34 |
35 | 36 | 37 | 38 |
39 | 86 | 87 | 88 |
89 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/layout.html___jb_tmp___: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 首页 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
30 | 31 | 32 |
33 | 34 |
35 | 36 | 37 | 38 |
39 | 86 | 87 | 88 |
89 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/layout3.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 首页 7 | 8 | 9 | 10 | 11 | 12 | 15 | 16 | 18 | 19 | 20 | 21 | 31 | 32 | 33 | 34 | 36 | 37 | 38 | 39 | 47 | 48 | 49 | 50 |
51 | 52 | 53 |
54 | 55 |
56 |
57 | 123 | 124 | 125 |
126 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/main/resources/templates/share/layoutAdmin.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 首页 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 31 | 34 | 35 | 36 | 37 |
38 | 39 | 40 | 41 |
42 | 43 |
44 |
45 | 46 |
47 |
48 |
49 |
50 |
51 | 52 |
53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 72 | 113 | 114 |
115 | 116 | -------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/test/java/com/warmer/kgmaker/HandLPTest.java: --------------------------------------------------------------------------------
package com.warmer.kgmaker;

import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;

/*@RunWith(SpringRunner.class)
@SpringBootTest*/
public class HandLPTest {

    /**
     * Exercises HanLP segmentation with a custom dictionary entry.
     *
     * Adds the multi-character word "非常热" (tagged "ng 0") to the custom
     * dictionary and segments three near-identical sentences, printing each
     * resulting term, so the effect of the custom entry on the third sentence
     * can be compared against the other two by eye.
     *
     * Note: the original wrapped everything in a broad catch(Exception) that
     * only printed the error, so the test passed even when HanLP failed.
     * Exceptions now propagate and fail the test, as JUnit expects.
     */
    @Test
    public void TestA() {
        String lineStr = "明天虽然会下雨,但是我还是会看周杰伦的演唱会。";
        String lineStr2 = "明天虽然会好热,但是我还是会看周杰伦的演唱会。";
        String lineStr3 = "明天虽然会非常热,但是我还是会看周杰伦的演唱会。";
        Segment segment = HanLP.newSegment();
        segment.enableCustomDictionary(true);
        /**
         * Custom word + part-of-speech tag ("ng", frequency 0).
         */
        // Disabled variant kept from the original experiment:
        //CustomDictionary.add("好热","ng 0");
        CustomDictionary.add("非常热","ng 0");
        printTerms(segment.seg(lineStr));
        printTerms(segment.seg(lineStr2));
        printTerms(segment.seg(lineStr3));
    }

    /** Prints each segmented term (word/nature pair) on its own line. */
    private static void printTerms(List<Term> terms) {
        for (Term term : terms) {
            System.out.println(term.toString());
        }
    }
}
-------------------------------------------------------------------------------- /基于图谱的应用/kgmaker/src/test/java/com/warmer/kgmaker/KgmakerApplicationTests.java: --------------------------------------------------------------------------------
package com.warmer.kgmaker;

import com.alibaba.fastjson.JSON;
import 
com.warmer.kgmaker.util.Neo4jUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;

@RunWith(SpringJUnit4ClassRunner.class) // JUnit support via the Spring-Test framework
@SpringBootTest(classes = KgmakerApplication.class) // the Spring Boot application entry class
@WebAppConfiguration // web project: JUnit must simulate a ServletContext
public class KgmakerApplicationTests {

    @Autowired
    private Neo4jUtil neo4jUtil;

    /**
     * Smoke test: runs a spanning-tree Cypher query (APOC, maxLevel 3) rooted
     * at the node labeled `贵州` with name '交通事件' and prints the resulting
     * graph as JSON. Requires a live Neo4j instance with the APOC plugin.
     */
    @Test
    public void contextLoads() {
        String cyphersql="MATCH (n:`贵州`) WHERE n.name='交通事件' " +
                "CALL apoc.path.spanningTree(n, {maxLevel:3}) YIELD path" +
                " RETURN path";
        // Raw HashMap kept to match Neo4jUtil's declared return type;
        // presumably String->Object — TODO confirm against Neo4jUtil.
        HashMap result=neo4jUtil.GetGraphNodeAndShip(cyphersql);
        System.out.println(JSON.toJSON(result));
    }

    /**
     * Verifies that the target file and its parent directories can be created.
     *
     * Bug fix: the original called file.mkdirs() on the full path, which
     * creates a DIRECTORY named "tan.txt" instead of a file. Now the parent
     * directories are created first and the file itself via createNewFile().
     *
     * @throws IOException if the file cannot be created
     */
    @Test
    public void TestMakeDir() throws IOException {
        String strPath = "D:\\tan\\tan.txt";
        File file = new File(strPath);
        if(!file.exists()){
            File parent = file.getParentFile();
            if (parent != null && !parent.exists()) {
                parent.mkdirs(); // ensure D:\tan exists before creating the file
            }
            file.createNewFile();
        }
    }
}
--------------------------------------------------------------------------------