├── monitor
│   ├── src
│   │   ├── monitor.egg-info
│   │   │   ├── not-zip-safe
│   │   │   ├── dependency_links.txt
│   │   │   ├── top_level.txt
│   │   │   ├── requires.txt
│   │   │   ├── entry_points.txt
│   │   │   ├── PKG-INFO
│   │   │   └── SOURCES.txt
│   │   └── monitor
│   │       ├── static
│   │       │   ├── art
│   │       │   │   ├── logo.png
│   │       │   │   ├── go_top.png
│   │       │   │   ├── di_portal.png
│   │       │   │   ├── favicon.ico
│   │       │   │   ├── di_portalbkp.png
│   │       │   │   ├── login_background.png
│   │       │   │   ├── ui-icons_222222_256x240.png
│   │       │   │   ├── ui-icons_2e83ff_256x240.png
│   │       │   │   ├── ui-icons_454545_256x240.png
│   │       │   │   ├── ui-icons_888888_256x240.png
│   │       │   │   ├── ui-icons_cd0a0a_256x240.png
│   │       │   │   ├── ui-bg_flat_0_aaaaaa_40x100.png
│   │       │   │   ├── ui-bg_flat_75_ffffff_40x100.png
│   │       │   │   ├── ui-bg_glass_55_fbf9ee_1x400.png
│   │       │   │   ├── ui-bg_glass_65_ffffff_1x400.png
│   │       │   │   ├── ui-bg_glass_75_dadada_1x400.png
│   │       │   │   ├── ui-bg_glass_75_e6e6e6_1x400.png
│   │       │   │   ├── ui-bg_glass_95_fef1ec_1x400.png
│   │       │   │   └── ui-bg_highlight-soft_75_cccccc_1x100.png
│   │       │   ├── help
│   │       │   │   └── index.md
│   │       │   ├── css
│   │       │   │   ├── jquery.autocomplete.css
│   │       │   │   └── di_portal.css
│   │       │   ├── fonts
│   │       │   │   ├── glyphicons-halflings-regular.eot
│   │       │   │   ├── glyphicons-halflings-regular.ttf
│   │       │   │   └── glyphicons-halflings-regular.woff
│   │       │   └── js
│   │       │       ├── di_portal.js
│   │       │       ├── fnSetFilteringDelay.js
│   │       │       ├── jquery.livesearch.min.js
│   │       │       ├── date.js
│   │       │       ├── combobox.js
│   │       │       └── jquery.cookie.js
│   │       ├── locale
│   │       │   ├── es
│   │       │   │   └── LC_MESSAGES
│   │       │   │       └── django.mo
│   │       │   └── zh_CN
│   │       │       └── LC_MESSAGES
│   │       │           └── django.mo
│   │       ├── templates
│   │       │   └── shared_components.mako
│   │       ├── forms.py
│   │       ├── __init__.py
│   │       ├── models.py
│   │       ├── settings.py
│   │       ├── urls.py
│   │       ├── views.py
│   │       └── conf.py
│   ├── Makefile
│   └── setup.py
├── di-data-service
│   ├── .gitignore
│   ├── src
│   │   ├── main
│   │   │   ├── webapp
│   │   │   │   ├── META-INF
│   │   │   │   │   └── MANIFEST.MF
│   │   │   │   ├── index.jsp
│   │   │   │   └── WEB-INF
│   │   │   │       ├── web.xml
│   │   │   │       └── rest-servlet.xml
│   │   │   ├── java
│   │   │   │   └── com
│   │   │   │       └── ctrip
│   │   │   │           └── di
│   │   │   │               ├── dao
│   │   │   │               │   ├── user
│   │   │   │               │   │   ├── AuthUserMapper.java
│   │   │   │               │   │   ├── StatisticsUserMapper.java
│   │   │   │               │   │   ├── AuthUser.java
│   │   │   │               │   │   └── StatisticsUser.java
│   │   │   │               │   ├── spark
│   │   │   │               │   │   ├── SparkJobMapper.java
│   │   │   │               │   │   └── SparkJob.java
│   │   │   │               │   ├── hive
│   │   │   │               │   │   ├── HiveJobsMapper.java
│   │   │   │               │   │   ├── HiveJobsService.java
│   │   │   │               │   │   └── HiveJobsDo.java
│   │   │   │               │   ├── YarnJobUserCountDo.java
│   │   │   │               │   ├── YarnUserJobDo.java
│   │   │   │               │   ├── YarnJobsMapper.java
│   │   │   │               │   ├── YarnJobsDo.java
│   │   │   │               │   ├── YarnJobService.java
│   │   │   │               │   └── YarnJobCountDo.java
│   │   │   │               ├── common
│   │   │   │               │   ├── authenticate
│   │   │   │               │   │   ├── IAuthenticate.java
│   │   │   │               │   │   ├── LdapAuthenticate.java
│   │   │   │               │   │   └── MessageDigestAuthenticate.java
│   │   │   │               │   ├── MetricConfigParser.java
│   │   │   │               │   ├── util
│   │   │   │               │   │   ├── PrintWriterUtil.java
│   │   │   │               │   │   └── UrlUtils.java
│   │   │   │               │   └── jmx
│   │   │   │               │       ├── YarnJmxBean.java
│   │   │   │               │       ├── YarnJmxService.java
│   │   │   │               │       ├── AbstractJmxService.java
│   │   │   │               │       ├── HdfsJmxService.java
│   │   │   │               │       └── HdfsJmxBean.java
│   │   │   │               ├── hive
│   │   │   │               │   ├── util
│   │   │   │               │   │   ├── HDFSHelper.java
│   │   │   │               │   │   └── HiveHelper.java
│   │   │   │               │   └── alert
│   │   │   │               │       └── EMail.java
│   │   │   │               ├── hdfs
│   │   │   │               │   ├── HdfsDirSummary.java
│   │   │   │               │   └── HdfsFileSummaryService.java
│   │   │   │               ├── controller
│   │   │   │               │   ├── HdfsController.java
│   │   │   │               │   ├── MetricsController.java
│   │   │   │               │   ├── AuthUserController.java
│   │   │   │               │   ├── SparkController.java
│   │   │   │               │   └── GangliaController.java
│   │   │   │               ├── ganglia
│   │   │   │               │   └── GangliaHttpParser.java
│   │   │   │               ├── pojo
│   │   │   │               │   └── gen
│   │   │   │               │       ├── HostList.java
│   │   │   │               │       ├── Metrics.java
│   │   │   │               │       ├── Clusters.java
│   │   │   │               │       ├── ObjectFactory.java
│   │   │   │               │       └── Cluster.java
│   │   │   │               ├── spark
│   │   │   │               │   └── SparkService.java
│   │   │   │               └── yarn
│   │   │   │                   └── YarnJobCrawlerTask.java
│   │   │   └── resources
│   │   │       ├── conf
│   │   │       │   ├── mybatis
│   │   │       │   │   ├── auth_user.xml
│   │   │       │   │   ├── statistics_user.xml
│   │   │       │   │   ├── mybatis-config.xml
│   │   │       │   │   ├── SparkJobs.xml
│   │   │       │   │   └── hiveJobs.xml
│   │   │       │   ├── hdfs
│   │   │       │   │   └── test-hdfs-client-conf.xml
│   │   │       │   ├── hive
│   │   │       │   │   └── test-hive-site.xml
│   │   │       │   ├── di.properties
│   │   │       │   └── metric
│   │   │       │       └── portal_hadoop_config_test.xml
│   │   │       ├── script
│   │   │       │   └── di.sql
│   │   │       ├── metric.xsd
│   │   │       └── log4j.properties
│   │   └── test
│   │       ├── java
│   │       │   └── com
│   │       │       └── ctrip
│   │       │           └── di
│   │       │               ├── common
│   │       │               │   └── jmx
│   │       │               │       └── TestHdfsJmxService.java
│   │       │               ├── hive
│   │       │               │   ├── alert
│   │       │               │   │   └── TestEMail.java
│   │       │               │   ├── TestHiveCheckService.java
│   │       │               │   ├── TestHDFSHelper.java
│   │       │               │   ├── TestHiveHelper.java
│   │       │               │   └── TestHiveJob.java
│   │       │               ├── spark
│   │       │               │   └── TestSparkJdbc.java
│   │       │               ├── ganglia
│   │       │               │   ├── TestGangliaMetricService.java
│   │       │               │   └── TestGangliaHttpParser.java
│   │       │               ├── TestMain.java
│   │       │               └── TestJmx.java
│   │       └── resources
│   │           └── rest-servlet.xml
│   ├── docs
│   │   ├── di-data-service.png
│   │   └── Data Service for Hadoop cluster.docx
│   ├── README.md
│   ├── LICENSE
│   ├── .classpath
│   └── .project
├── sparksql
│   ├── src
│   │   ├── sparksql.egg-info
│   │   │   ├── not-zip-safe
│   │   │   ├── dependency_links.txt
│   │   │   ├── top_level.txt
│   │   │   ├── requires.txt
│   │   │   ├── entry_points.txt
│   │   │   ├── PKG-INFO
│   │   │   └── SOURCES.txt
│   │   └── sparksql
│   │       ├── static
│   │       │   ├── art
│   │       │   │   ├── loads.gif
│   │       │   │   └── sparksql.png
│   │       │   ├── help
│   │       │   │   └── index.md
│   │       │   ├── css
│   │       │   │   └── sparksql.css
│   │       │   └── js
│   │       │       └── loading.js
│   │       ├── templates
│   │       │   ├── shared_components.mako
│   │       │   ├── jobs.mako
│   │       │   └── index.mako
│   │       ├── __init__.py
│   │       ├── forms.py
│   │       ├── models.py
│   │       ├── settings.py
│   │       ├── urls.py
│   │       ├── views.py
│   │       └── conf.py
│   ├── Makefile
│   └── setup.py
├── docs
│   ├── sparksql.png
│   ├── di-data-service.png
│   ├── hdfs_dashboard.png
│   ├── metric_monitor.png
│   ├── mapreduce_dashboard.png
│   └── Data Service for Hadoop cluster.docx
├── .classpath
├── .project
├── LICENSE
└── README.md

/monitor/src/monitor.egg-info/not-zip-safe:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/di-data-service/.gitignore:
--------------------------------------------------------------------------------
/target
*.class
--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/not-zip-safe:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/top_level.txt:
--------------------------------------------------------------------------------
monitor
--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/top_level.txt:
--------------------------------------------------------------------------------
sparksql
--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/requires.txt:
--------------------------------------------------------------------------------
setuptools
desktop
--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/requires.txt:
--------------------------------------------------------------------------------
setuptools
desktop
--------------------------------------------------------------------------------
/docs/sparksql.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/sparksql.png
--------------------------------------------------------------------------------
/di-data-service/src/main/webapp/META-INF/MANIFEST.MF:
--------------------------------------------------------------------------------
Manifest-Version: 1.0
Class-Path:

--------------------------------------------------------------------------------
/docs/di-data-service.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/di-data-service.png
--------------------------------------------------------------------------------
/docs/hdfs_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/hdfs_dashboard.png
--------------------------------------------------------------------------------
/docs/metric_monitor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/metric_monitor.png
--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/entry_points.txt:
--------------------------------------------------------------------------------
[desktop.sdk.application]
monitor=monitor

--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/entry_points.txt:
--------------------------------------------------------------------------------
[desktop.sdk.application]
sparksql=sparksql

--------------------------------------------------------------------------------
/docs/mapreduce_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/mapreduce_dashboard.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/logo.png
--------------------------------------------------------------------------------
/di-data-service/docs/di-data-service.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/di-data-service/docs/di-data-service.png
--------------------------------------------------------------------------------
/docs/Data Service for Hadoop cluster.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/docs/Data Service for Hadoop cluster.docx
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/go_top.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/go_top.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/di_portal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/di_portal.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/favicon.ico
--------------------------------------------------------------------------------
/sparksql/src/sparksql/static/art/loads.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/sparksql/src/sparksql/static/art/loads.gif
--------------------------------------------------------------------------------
/monitor/src/monitor/static/help/index.md:
--------------------------------------------------------------------------------
Help for your app, written in [MarkDown](http://daringfireball.net/projects/markdown/syntax) syntax.
--------------------------------------------------------------------------------
/sparksql/src/sparksql/static/art/sparksql.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/sparksql/src/sparksql/static/art/sparksql.png
--------------------------------------------------------------------------------
/sparksql/src/sparksql/static/help/index.md:
--------------------------------------------------------------------------------
Help for your app, written in [MarkDown](http://daringfireball.net/projects/markdown/syntax) syntax.
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/di_portalbkp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/di_portalbkp.png
--------------------------------------------------------------------------------
/monitor/src/monitor/locale/es/LC_MESSAGES/django.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/locale/es/LC_MESSAGES/django.mo
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/login_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/login_background.png
--------------------------------------------------------------------------------
/monitor/src/monitor/locale/zh_CN/LC_MESSAGES/django.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/locale/zh_CN/LC_MESSAGES/django.mo
--------------------------------------------------------------------------------
/monitor/src/monitor/static/css/jquery.autocomplete.css:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/css/jquery.autocomplete.css
--------------------------------------------------------------------------------
/di-data-service/docs/Data Service for Hadoop cluster.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/di-data-service/docs/Data Service for Hadoop cluster.docx
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-icons_222222_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-icons_222222_256x240.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-icons_2e83ff_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-icons_2e83ff_256x240.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-icons_454545_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-icons_454545_256x240.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-icons_888888_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-icons_888888_256x240.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-icons_cd0a0a_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-icons_cd0a0a_256x240.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_flat_0_aaaaaa_40x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_flat_0_aaaaaa_40x100.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_flat_75_ffffff_40x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_flat_75_ffffff_40x100.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_glass_55_fbf9ee_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_glass_55_fbf9ee_1x400.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_glass_65_ffffff_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_glass_65_ffffff_1x400.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_glass_75_dadada_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_glass_75_dadada_1x400.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_glass_75_e6e6e6_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_glass_75_e6e6e6_1x400.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_glass_95_fef1ec_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_glass_95_fef1ec_1x400.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/monitor/src/monitor/static/art/ui-bg_highlight-soft_75_cccccc_1x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arch/Hue-Ctrip-DI/master/monitor/src/monitor/static/art/ui-bg_highlight-soft_75_cccccc_1x100.png
--------------------------------------------------------------------------------
/monitor/src/monitor/static/js/di_portal.js:
--------------------------------------------------------------------------------
/*
version = 1.0

version = 1.1, by 2014-10-11

version = 1.1.1, by 2014-10-14

version = 1.1.2, by 2014-10-16

version = 1.1.3, by 2014-10-24
*/
--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
Metadata-Version: 1.0
Name: monitor
Version: 0.1
Summary: TODO
Home-page: TODO
Author: TODO
Author-email: UNKNOWN
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
Metadata-Version: 1.0
Name: sparksql
Version: 0.1
Summary: TODO
Home-page: TODO
Author: TODO
Author-email: UNKNOWN
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
--------------------------------------------------------------------------------
/di-data-service/src/test/java/com/ctrip/di/common/jmx/TestHdfsJmxService.java:
--------------------------------------------------------------------------------
package com.ctrip.di.common.jmx;

public class TestHdfsJmxService {

    public static void main(String[] args) throws Exception {
        HdfsJmxService test = new HdfsJmxService();
        test.getJmxBean();
    }

}
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/user/AuthUserMapper.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.user;

import org.springframework.stereotype.Repository;

/**
 * Authenticate user Mapper
 * @author xgliao
 *
 */
@Repository
public interface AuthUserMapper {

    public AuthUser getAuthUser(String username);

}
--------------------------------------------------------------------------------
/di-data-service/src/test/java/com/ctrip/di/hive/alert/TestEMail.java:
--------------------------------------------------------------------------------
package com.ctrip.di.hive.alert;

import org.junit.Test;

public class TestEMail {

    @Test
    public void test() throws Exception {
        String subject = "Test email";

        String content = "For any questions, please email cdi-hadoop@Ctrip.com";

        EMail.sendMail(subject, content, "xgliao@ctrip.com", null);
    }

}
--------------------------------------------------------------------------------
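A quick aside before the next sources: the mappers above (for example AuthUserMapper) are plain MyBatis interfaces. The following is a minimal sketch of how such a mapper is typically consumed once mybatis-spring has registered it as a bean; the class and package below are hypothetical and not part of this repository, and only AuthUserMapper, AuthUser, and getAuthUser come from the sources shown above.

package com.ctrip.di.example; // hypothetical package, not in this repository

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.ctrip.di.dao.user.AuthUser;
import com.ctrip.di.dao.user.AuthUserMapper;

/**
 * Hypothetical sketch: consumes the MyBatis-generated proxy for
 * AuthUserMapper via Spring injection.
 */
@Service
public class AuthUserLookupExample {

    @Autowired
    private AuthUserMapper authUserMapper; // proxy created by mybatis-spring

    /** Returns true if an auth_user row exists for the given name. */
    public boolean userExists(String username) {
        AuthUser user = authUserMapper.getAuthUser(username);
        return user != null;
    }
}
--------------------------------------------------------------------------------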
/di-data-service/src/main/java/com/ctrip/di/common/authenticate/IAuthenticate.java:
--------------------------------------------------------------------------------
package com.ctrip.di.common.authenticate;

/**
 *
 * Authenticate: provides an API to check whether a user is valid in the
 * DI portal. Returns true if the user is valid, otherwise false.
 * @author xgliao
 *
 */
public interface IAuthenticate {

    boolean authenticate(String username, String password);

}
--------------------------------------------------------------------------------
/di-data-service/src/test/java/com/ctrip/di/spark/TestSparkJdbc.java:
--------------------------------------------------------------------------------
package com.ctrip.di.spark;

import java.util.List;
import java.util.Map;

public class TestSparkJdbc {

    public static void main(String[] args) throws Exception {
        SparkService test = new SparkService();
        List<Map<String, Object>> result = test.executeSQL("show tables", "default");
        System.out.println(result.size());
    }

}
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/user/StatisticsUserMapper.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.user;

import java.util.List;

import org.springframework.stereotype.Repository;

/**
 * Statistics user Mapper
 * @author wu_jm
 *
 */
@Repository
public interface StatisticsUserMapper {

    public void insertStatisticsUser(StatisticsUser user);

    public List<StatisticsUser> queryAllStatistic();

}
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/spark/SparkJobMapper.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.spark;

import java.util.List;

import org.springframework.stereotype.Repository;

/**
 * Spark job Mapper
 * @author xgliao
 *
 */
@Repository
public interface SparkJobMapper {

    public void insertSparkJob(SparkJob sparkJob);

    public List<SparkJob> getSparkJobsByUser(String userName);

}
--------------------------------------------------------------------------------
/monitor/src/monitor.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
setup.py
src/monitor/__init__.py
src/monitor/forms.py
src/monitor/models.py
src/monitor/settings.py
src/monitor/urls.py
src/monitor/views.py
src/monitor.egg-info/PKG-INFO
src/monitor.egg-info/SOURCES.txt
src/monitor.egg-info/dependency_links.txt
src/monitor.egg-info/entry_points.txt
src/monitor.egg-info/not-zip-safe
src/monitor.egg-info/requires.txt
src/monitor.egg-info/top_level.txt
--------------------------------------------------------------------------------
/sparksql/src/sparksql.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
setup.py
src/sparksql/__init__.py
src/sparksql/forms.py
src/sparksql/models.py
src/sparksql/settings.py
src/sparksql/urls.py
src/sparksql/views.py
src/sparksql.egg-info/PKG-INFO
src/sparksql.egg-info/SOURCES.txt
src/sparksql.egg-info/dependency_links.txt
src/sparksql.egg-info/entry_points.txt
src/sparksql.egg-info/not-zip-safe
src/sparksql.egg-info/requires.txt
src/sparksql.egg-info/top_level.txt
--------------------------------------------------------------------------------
/.classpath:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/hive/HiveJobsMapper.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.hive;

import java.util.List;

import org.springframework.stereotype.Repository;

@Repository
public interface HiveJobsMapper {

    public void insertHiveJobs(HiveJobsDo hiveJobsDo);

    public HiveJobsDo getHiveJobs(String dbname, String tablename);

    public List<HiveJobsDo> getAllHiveJobs();

    void updateHiveJobs(HiveJobsDo hiveJobsDo);
}
--------------------------------------------------------------------------------
/monitor/src/monitor/templates/shared_components.mako:
--------------------------------------------------------------------------------

<%!
def is_selected(section, matcher):
  if section == matcher:
    return "active"
  else:
    return ""
%>

<%def name="menubar(section='')">
</%def>
--------------------------------------------------------------------------------
/sparksql/src/sparksql/templates/shared_components.mako:
--------------------------------------------------------------------------------

<%!
def is_selected(section, matcher):
  if section == matcher:
    return "active"
  else:
    return ""
%>

<%def name="menubar(section='')">
</%def>
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>Hue-Ctrip-DI</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
    <buildCommand>
      <name>org.python.pydev.PyDevBuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.jdt.core.javabuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.eclipse.jdt.core.javanature</nature>
    <nature>org.python.pydev.pythonNature</nature>
  </natures>
</projectDescription>
--------------------------------------------------------------------------------
/di-data-service/src/test/java/com/ctrip/di/hive/TestHiveCheckService.java:
--------------------------------------------------------------------------------
package com.ctrip.di.hive;

import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com.ctrip.di.hive.HiveCheckService;

public class TestHiveCheckService {

    @Test
    public void testHiveCheck() {
        ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml");

        HiveCheckService hcs = context.getBean(HiveCheckService.class);

        hcs.checkHiveJob("test", "test2");
    }
}
--------------------------------------------------------------------------------
/di-data-service/src/main/webapp/index.jsp:
--------------------------------------------------------------------------------
My JSP 'index.jsp' starting page

This is my JSP page.
--------------------------------------------------------------------------------
/di-data-service/src/main/resources/conf/mybatis/auth_user.xml:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/di-data-service/README.md:
--------------------------------------------------------------------------------
# DI-Data-Service #

## Infrastructure ##

DI Data Service leverages Ganglia to collect all of the metrics from HDFS, YARN, HBase, Spark, and so on. The service also collects metrics from the cluster via JMX or any other available API.

![Off-CLI Installation](https://github.com/Ctrip-DI/Hue-Ctrip-DI/blob/master/di-data-service/docs/di-data-service.png)

## How to build and deploy ##
Create the tables with the script /di-data-service/src/main/resources/script/di.sql

Configure /di-data-service/src/main/resources/conf/di.properties for your environment

The build depends on Maven; run this command in the project folder:
mvn clean install

Deploy the WAR package to a web container such as Tomcat or Jetty
--------------------------------------------------------------------------------
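Once the WAR is deployed, a tiny client can confirm the service is up by fetching the welcome page that web.xml maps to /index.jsp. Below is a minimal sketch; the class and package are hypothetical and not part of this repository, and the host, port, and context path are assumptions about a local Tomcat deployment.

package com.ctrip.di.example; // hypothetical package, not in this repository

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

/**
 * Hypothetical smoke test: fetches the deployed webapp's welcome page.
 * Host, port, and context path below are assumptions, not documented values.
 */
public class SmokeTest {

    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/di-data-service/");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        System.out.println("HTTP status: " + conn.getResponseCode());
        BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
        try {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            in.close();
        }
    }
}
--------------------------------------------------------------------------------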
/di-data-service/src/main/java/com/ctrip/di/dao/user/AuthUser.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.user;

/**
 * Authenticate user data object
 * @author xgliao
 *
 */
public class AuthUser {
    private String userName;
    private String password;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    @Override
    public String toString() {
        return "User name is " + userName + ", password is " + password;
    }

}
--------------------------------------------------------------------------------
/monitor/src/monitor/static/css/di_portal.css:
--------------------------------------------------------------------------------
/*
di_portal styles
important: these should be namespaced
example:
let's say you are creating an app called "calculator"; you should prefix all your styles with your application's name like so

.calculator img {
  border: 1px solid #000;
}
.calculator p {
  margin: 10px 0px;
}
etc...

other notes:
* don't use ids - there may be more than one of your apps open; use classes!
* the toolbar element is absolutely positioned and 100% wide (and therefore 0px high);
  any elements inside it should also be absolutely positioned
*/

.di_portal img.di_portal_icon {
  width: 55px;
  height: 55px;
  position: absolute;
  top: 27px;
  left: 3px;
}
--------------------------------------------------------------------------------
/monitor/src/monitor/forms.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/monitor/src/monitor/__init__.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/monitor/src/monitor/models.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/sparksql/src/sparksql/__init__.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/sparksql/src/sparksql/forms.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/sparksql/src/sparksql/models.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/user/StatisticsUser.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao.user;

/**
 * @author wu_jm
 *
 */
public class StatisticsUser {
    private int id;
    private int user_type;
    private int counts;
    private long timestamps;

    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }

    public int getUser_type() {
        return user_type;
    }
    public void setUser_type(int user_type) {
        this.user_type = user_type;
    }
    public int getCounts() {
        return counts;
    }
    public void setCounts(int counts) {
        this.counts = counts;
    }
    public long getTimestamps() {
        return timestamps;
    }
    public void setTimestamps(long timestamps) {
        this.timestamps = timestamps;
    }
}
--------------------------------------------------------------------------------
/di-data-service/src/test/java/com/ctrip/di/ganglia/TestGangliaMetricService.java:
--------------------------------------------------------------------------------
package com.ctrip.di.ganglia;

import java.util.Set;

import net.sf.json.JSONObject;

import com.ctrip.di.common.util.UrlUtils;

public class TestGangliaMetricService {

    public static void main(String[] args) {
        String json = UrlUtils
                .getContent("http://10.8.75.3/ganglia/?r=hour&cs=&ce=&s=by+name&c=Hbase_Dashboard_Cluster&tab=m&vn=&hide-hf=false");
        JSONObject jsonObject = JSONObject.fromObject(json);
        JSONObject messagejson = (JSONObject) jsonObject.get("message");
        JSONObject clustersJson = (JSONObject) messagejson.get("clusters");
        @SuppressWarnings("unchecked")
        Set<String> test = clustersJson.keySet();
        for (String cluster : test) {
            System.out.println(cluster);
        }
    }

}
--------------------------------------------------------------------------------
/monitor/Makefile:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ifeq ($(ROOT),)
  $(error "Error: Expect the environment variable $$ROOT to point to the Desktop installation")
endif

include $(ROOT)/Makefile.sdk
--------------------------------------------------------------------------------
/monitor/src/monitor/settings.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DJANGO_APPS = [ "monitor" ]
NICE_NAME = 'Monitor'
REQUIRES_HADOOP = False
MENU_INDEX = 100
ICON = "/monitor/static/art/di_portal.png"
--------------------------------------------------------------------------------
/sparksql/Makefile:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ifeq ($(ROOT),)
  $(error "Error: Expect the environment variable $$ROOT to point to the Desktop installation")
endif

include $(ROOT)/Makefile.sdk
--------------------------------------------------------------------------------
/sparksql/src/sparksql/settings.py:
--------------------------------------------------------------------------------
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DJANGO_APPS = [ "sparksql" ]
NICE_NAME = 'Spark SQL'
REQUIRES_HADOOP = False
MENU_INDEX = 100
ICON = "/sparksql/static/art/sparksql.png"
--------------------------------------------------------------------------------
/di-data-service/src/main/resources/conf/mybatis/statistics_user.xml:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/sparksql/src/sparksql/urls.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('sparksql',
  url(r'^$', 'views.index'),
  url(r'^jobs/$', 'views.jobs'),
)
--------------------------------------------------------------------------------
/di-data-service/src/main/resources/conf/hdfs/test-hdfs-client-conf.xml:
--------------------------------------------------------------------------------
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://ns</value>
  </property>
  <property>
    <name>dfs.nameservices</name>
    <value>ns</value>
  </property>
  <property>
    <name>dfs.ha.namenodes.ns</name>
    <value>test</value>
  </property>
  <property>
    <name>dfs.namenode.rpc-address.ns.SVR2368HP360</name>
    <value>namenode:54310</value>
  </property>
  <property>
    <name>dfs.namenode.rpc-address.ns.SVR2369HP360</name>
    <value>namenode:54310</value>
  </property>
  <property>
    <name>dfs.client.failover.proxy.provider.ns</name>
    <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
  </property>
</configuration>
--------------------------------------------------------------------------------
/monitor/src/monitor/urls.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('monitor',
  url(r'^$', 'views.index'),
  url(r'^metrics/$', 'views.metrics'),
)
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/YarnJobUserCountDo.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao;

/**
 * Yarn job count by user
 * @author xgliao
 *
 */
public class YarnJobUserCountDo extends YarnJobCountDo {

    private String user;
    private int mapCount;
    private int reduceCount;
    private long executionTime;

    public int getMapCount() {
        return mapCount;
    }

    public void setMapCount(int mapCount) {
        this.mapCount = mapCount;
    }

    public int getReduceCount() {
        return reduceCount;
    }

    public void setReduceCount(int reduceCount) {
        this.reduceCount = reduceCount;
    }

    public long getExecutionTime() {
        return executionTime;
    }

    public void setExecutionTime(long executionTime) {
        this.executionTime = executionTime;
    }

    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

}
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/dao/YarnUserJobDo.java:
--------------------------------------------------------------------------------
package com.ctrip.di.dao;

/**
 * Yarn job count by user
 * @author xgliao
 *
 */
public class YarnUserJobDo {
    private String user;
    private int totalCount;
    private int successCount;
    private int failCount;

    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public int getTotalCount() {
        return totalCount;
    }

    public void setTotalCount(int totalCount) {
        this.totalCount = totalCount;
    }

    public int getSuccessCount() {
        return successCount;
    }

    public void setSuccessCount(int successCount) {
        this.successCount = successCount;
    }

    public int getFailCount() {
        return failCount;
    }

    public void setFailCount(int failCount) {
        this.failCount = failCount;
    }

    public void init() {
        failCount = totalCount - successCount;
    }

}
--------------------------------------------------------------------------------
JAXBContext.newInstance(Clusters.class); 26 | Unmarshaller unmarshaller = jc.createUnmarshaller(); 27 | Clusters clusters = (Clusters) unmarshaller 28 | .unmarshal(MetricConfigParser.class.getClassLoader() 29 | .getResourceAsStream(metricConfigFile)); 30 | 31 | return clusters; 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 ctriposs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /monitor/src/monitor/views.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed to Cloudera, Inc. under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. Cloudera, Inc. licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | 18 | from desktop.lib.django_util import render 19 | import datetime 20 | 21 | def index(request): 22 | return render('index.mako', request, dict(date=datetime.datetime.now())) 23 | 24 | def metrics(request): 25 | return render('metrics.mako',request,{}) -------------------------------------------------------------------------------- /di-data-service/src/main/webapp/WEB-INF/web.xml: -------------------------------------------------------------------------------- 1 | 2 | 9 | di-data-service 10 | 11 | rest 12 | org.springframework.web.servlet.DispatcherServlet 13 | 14 | 1 15 | 16 | contextConfigLocation 17 | /WEB-INF/rest-servlet.xml 18 | 19 | 20 | 21 | rest 22 | / 23 | 24 | 25 | 26 | /index.jsp 27 | 28 | -------------------------------------------------------------------------------- /di-data-service/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 ctriposs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /sparksql/src/sparksql/views.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed to Cloudera, Inc. under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. Cloudera, Inc. licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | 18 | from desktop.lib.django_util import render 19 | import datetime 20 | 21 | def index(request): 22 | return render('index.mako', request, dict(date=datetime.datetime.now())) 23 | 24 | def jobs(request): 25 | return render('jobs.mako', request, dict(date=datetime.datetime.now())) 26 |
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/hive/util/HDFSHelper.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.hive.util; 2 | 3 | import java.io.IOException; 4 | import java.net.URL; 5 | 6 | import org.apache.commons.logging.Log; 7 | import org.apache.commons.logging.LogFactory; 8 | import org.apache.hadoop.conf.Configuration; 9 | import org.apache.hadoop.fs.FileSystem; 10 | import org.springframework.beans.factory.annotation.Value; 11 | import org.springframework.stereotype.Service; 12 | 13 | @Service 14 | public class HDFSHelper { 15 | 16 | @Value("${HDFS_CONFIG}") 17 | private String hdfs_conf; 18 | 19 | private static final Log LOG = LogFactory.getLog(HDFSHelper.class); 20 | 21 | private FileSystem fs = null; 22 | 23 | public FileSystem getFileSystem() { 24 | if (fs == null) { 25 | URL configUrl = HDFSHelper.class.getClassLoader().getResource( 26 | hdfs_conf); 27 | Configuration conf = new Configuration(); 28 | conf.addResource(configUrl); 29 | 30 | System.setProperty("HADOOP_USER_NAME", "hdfs"); 31 | try { 32 | fs = FileSystem.get(conf); 33 | } catch (IOException e) { 34 | LOG.error(e); 35 | } 36 | } 37 | 38 | return fs; 39 | } 40 | 41 | } 42 |
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/YarnJobsMapper.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.dao; 2 | 3 | import java.util.List; 4 | 5 | import org.springframework.stereotype.Repository; 6 | 7 | /** 8 | * Yarn jobs Mapper 9 | * @author xgliao 10 | * 11 | */ 12 | @Repository 13 | public interface YarnJobsMapper { 14 | 15 | public void insertYarnJob(YarnJobsDo yarnJobsDo); 16 | 17 | public List getYanJobs(); 18 | 19 | public Long getMaxStartTime(); 20 | 21 | public List getAllCount(); 22 | 23 | public List getJobCountByDate(int date); 24 | 25 | public List getJobCountByPageRange(int start, int end); 26 | 27 | public List getCountByDateUser(); 28 | 29 | public List getCountByDateUserD(int date); 30 | 31 | public List getCountByDateUserForPage(int start, int num); 32 | 33 | public List getNewestUserJobCount(); 34 | 35 | public List getUserJobCountByDate(int date); 36 | 37 | public List getYarnJobUserByDate(String date); 38 | 39 | public List getYarnJobUserByUserName(String userName); 40 | 41 | } 42 |
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/mybatis/mybatis-config.xml: --------------------------------------------------------------------------------
1 | <!-- MyBatis configuration file; the original markup could not be recovered from this dump. -->
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/hive/test-hive-site.xml: --------------------------------------------------------------------------------
1 | <?xml version="1.0"?> 2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> 3 | <configuration> 4 | <property> 5 | <name>hive.metastore.uris</name> 6 | <value>thrift://metastore-host:9083</value> 7 | <description>IP address (or fully-qualified domain name) and port of the metastore host</description> 8 | </property> 9 | </configuration>
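10 | 11 | <!-- 12 | Usage sketch (illustrative; an assumption, not part of the original repo): HiveHelper 13 | presumably loads this client config into a HiveConf before opening a metastore connection: 14 | 15 | HiveConf conf = new HiveConf(); 16 | conf.addResource("conf/hive/test-hive-site.xml"); 17 | HiveMetaStoreClient client = new HiveMetaStoreClient(conf); 18 | 19 | TestHiveJob and TestHiveHelper then exercise such a client (e.g. via listPartitionsByFilter). 20 | -->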
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/spark/SparkJob.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.dao.spark; 2 | 3 | /** 4 | * Spark job data object 5 | * @author xgliao 6 | * 7 | */ 8 | public class SparkJob { 9 | private int id; 10 | private long startTime; 11 | private long finishTime; 12 | private String sql; 13 | private String user; 14 | private String status; 15 | 16 | public String getSql() { 17 | return sql; 18 | } 19 | 20 | public void setSql(String sql) { 21 | this.sql = sql; 22 | } 23 | 24 | public int getId() { 25 | return id; 26 | } 27 | 28 | public void setId(int id) { 29 | this.id = id; 30 | } 31 | 32 | public long getStartTime() { 33 | return startTime; 34 | } 35 | 36 | public void setStartTime(long startTime) { 37 | this.startTime = startTime; 38 | } 39 | 40 | public long getFinishTime() { 41 | return finishTime; 42 | } 43 | 44 | public void setFinishTime(long finishTime) { 45 | this.finishTime = finishTime; 46 | } 47 | 48 | public String getUser() { 49 | return user; 50 | } 51 | 52 | public void setUser(String user) { 53 | this.user = user; 54 | } 55 | 56 | public String getStatus() { 57 | return status; 58 | } 59 | 60 | public void setStatus(String status) { 61 | this.status = status; 62 | } 63 | 64 | } 65 |
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/mybatis/SparkJobs.xml: --------------------------------------------------------------------------------
1 | <!-- MyBatis mapper for the spark_jobs table; the original markup could not be recovered from this dump. -->
-------------------------------------------------------------------------------- /di-data-service/.classpath: --------------------------------------------------------------------------------
1 | <!-- Eclipse .classpath file; the original markup could not be recovered from this dump. -->
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/util/PrintWriterUtil.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.common.util; 2 | 3 | import java.io.IOException; 4 | import java.io.PrintWriter; 5 | 6 | import javax.servlet.http.HttpServletRequest; 7 | import javax.servlet.http.HttpServletResponse; 8 | 9 | import org.apache.commons.logging.Log; 10 | import org.apache.commons.logging.LogFactory; 11 | 12 | /** 13 | * Utility for writing JSONP callback responses. 14 | * @author xgliao 15 | * 16 | */ 17 | public class PrintWriterUtil { 18 | private static Log logger = LogFactory.getLog(PrintWriterUtil.class); 19 | 20 | public static void writeJson(HttpServletRequest request, 21 | HttpServletResponse response, String data) { 22 | response.setContentType("application/json"); 23 | String jsonpCallback = request.getParameter("jsonpCallback"); 24 | PrintWriter pw = null; 25 | try { 26 | pw = response.getWriter(); 27 | pw.println(jsonpCallback == null ? data : jsonpCallback + "(" + data + ")"); // fall back to plain JSON when no callback parameter is supplied 28 | } catch (IOException e) { 29 | logger.error("Json Write Error:", e); 30 | } finally { 31 | if (pw != null) { 32 | pw.flush(); 33 | pw.close(); 34 | } 35 | } 36 | 37 | } 38 | 39 | public static void writeError(HttpServletRequest request, 40 | HttpServletResponse response, String error) { 41 | String json = "{\"status\":\"error\",\"message\":\"" + error + ".\"}"; 42 | 43 |
writeJson(request, response, json); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/util/UrlUtils.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.common.util; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.IOException; 5 | import java.io.InputStreamReader; 6 | import java.net.URL; 7 | import java.net.URLConnection; 8 | 9 | /** 10 | * Get the content from the url 11 | * @author xgliao 12 | * 13 | */ 14 | public class UrlUtils { 15 | 16 | public static String getContent(String urlStr) { 17 | StringBuilder sb = new StringBuilder(); 18 | InputStreamReader inputStreamReader = null; 19 | BufferedReader br = null; 20 | try { 21 | URL url = new URL(urlStr); 22 | URLConnection conn = url.openConnection(); 23 | 24 | inputStreamReader = new InputStreamReader(conn.getInputStream()); 25 | br = new BufferedReader(inputStreamReader); 26 | 27 | String line = null; 28 | while ((line = br.readLine()) != null) { 29 | sb.append(line); 30 | } 31 | } catch (Exception e) { 32 | throw new RuntimeException("Url connection failed:" + urlStr, e); 33 | } finally { 34 | if (inputStreamReader != null) { 35 | try { 36 | inputStreamReader.close(); 37 | } catch (IOException e) { 38 | // Ignore 39 | } 40 | } 41 | if (br != null) { 42 | try { 43 | br.close(); 44 | } catch (IOException e) { 45 | // Ignore 46 | } 47 | } 48 | } 49 | return sb.toString(); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/ganglia/TestGangliaHttpParser.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.ganglia; 2 | 3 | import java.net.URL; 4 | 5 | import org.htmlparser.Node; 6 | import org.htmlparser.NodeFilter; 7 | import org.htmlparser.Parser; 8 | import org.htmlparser.filters.AndFilter; 9 | import org.htmlparser.filters.HasAttributeFilter; 10 | import org.htmlparser.filters.TagNameFilter; 11 | import org.htmlparser.tags.OptionTag; 12 | import org.htmlparser.util.NodeList; 13 | import org.htmlparser.util.SimpleNodeIterator; 14 | 15 | public class TestGangliaHttpParser { 16 | 17 | public static void main(String[] args) throws Exception { 18 | Parser parser = new Parser(new URL("http://10.8.75.3/ganglia/?r=hour&cs=&ce=&s=by+name&c=Zookeeper_Cluster&tab=m&vn=&hide-hf=false").openConnection()); 19 | NodeFilter nodeFilter = new AndFilter(new TagNameFilter("select"), 20 | new HasAttributeFilter("id", "metrics-picker")); 21 | NodeList nodeList = parser.extractAllNodesThatMatch(nodeFilter); 22 | SimpleNodeIterator iterator = nodeList.elements(); 23 | while (iterator.hasMoreNodes()) { 24 | Node node = iterator.nextNode(); 25 | 26 | SimpleNodeIterator childIterator = node.getChildren().elements(); 27 | while (childIterator.hasMoreNodes()) { 28 | OptionTag children = (OptionTag) childIterator.nextNode(); 29 | System.out.println(children.getOptionText()); 30 | } 31 | } 32 | 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /sparksql/src/sparksql/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed to Cloudera, Inc. under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. 
Cloudera, Inc. licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | """ 18 | Configuration options for the sparksql application. 19 | """ 20 | from django.utils.translation import ugettext_lazy as _ 21 | 22 | from desktop.lib.conf import Config, ConfigSection 23 | 24 | SPARK_SERVICE = ConfigSection( 25 | key='spark-service', 26 | help=_('Configuration options for the Spark SQL data service'), 27 | members=dict( 28 | SPARK_SQL_URL = Config( 29 | key="spark_sql_url", 30 | help=_("Base URL of the Spark SQL REST service."), 31 | type=str, 32 | default="http://localhost:8080/di-data-service/" 33 | ) 34 | ) 35 | ) 36 |
-------------------------------------------------------------------------------- /monitor/src/monitor/conf.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python 2 | # Licensed to Cloudera, Inc. under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. Cloudera, Inc. licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | """ 18 | Configuration options for the monitor application. 19 | """ 20 | from django.utils.translation import ugettext_lazy as _ 21 | 22 | from desktop.lib.conf import Config, ConfigSection 23 | 24 | DATA_SERVICE = ConfigSection( 25 | key='di-service', 26 | help=_('Configuration options for the DI data service'), 27 | members=dict( 28 | DI_DATA_SERVICE_URL = Config( 29 | key="di_data_service_url", 30 | help=_("Base URL of the di-data-service REST API."), 31 | type=str, 32 | default="http://localhost:8080/di-data-service/" 33 | ) 34 | ) 35 | ) 36 |
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/di.properties: --------------------------------------------------------------------------------
1 | ## Ganglia host IP 2 | GANGLIA_HOST=localhost 3 | GANGLIA_GRAPH_URL=http://${GANGLIA_HOST}/ganglia/graph.php?
4 | GANGLIA_HOST_URL=http://${GANGLIA_HOST}/ganglia/api/host.php?action=list 5 | GANGLIA_METRIC_URL=http://${GANGLIA_HOST}/ganglia/?r=hour&cs=&ce=&s=by+name&c=#CLUSTER_NAME#&tab=m&vn=&hide-hf=false 6 | GANGLIA_METRIC_URL_ClUSTER_PATTERN=#CLUSTER_NAME# 7 | 8 | ## YARN HOST 9 | YARN_HISTORY_JOB_URL=http://localhost:19988/ws/v1/history/mapreduce/jobs 10 | 11 | METRIC_CONFIG_FILE=conf/metric/portal_hadoop_config_test.xml 12 | HDFS_CONFIG=conf/hdfs/test-hdfs-client-conf.xml 13 | HADOOP_USER_NAME=hdfs 14 | 15 | #Hive alert and clean config 16 | HIVE_CONFIG=conf/hive/test-hive-site.xml 17 | REMOVE_PARTITION_NUM_ALERT_THRESHOLD=5 18 | HIVE_CHECK_AVG_PARTITION_NUM=7 19 | ADMIN_EMAIL=xgliao@Ctrip.com 20 | 21 | ##MYSQL CONFIG 22 | jdbc.driverClassName=com.mysql.jdbc.Driver 23 | jdbc.url=jdbc:mysql://localhost:3306/hue?useUnicode=true&characterEncoding=UTF-8&autocommit=false 24 | jdbc.username=*** 25 | jdbc.password=*** 26 | 27 | #hdfs and yarn jmx server host 28 | HDFS_JMX_SERVER=service:jmx:rmi:///jndi/rmi://localhost:8006/jmxrmi 29 | YARN_JMX_SERVER=service:jmx:rmi:///jndi/rmi://localhost:8026/jmxrmi 30 | 31 | ##Spark jdbc 32 | SPARK_JDBC_URL=jdbc:hive2://localhost:10000/default 33 | 34 | # Ctrip Ldap url 35 | LDAP_URL=ldap://localhost:389/DC=cn1,DC=global,DC=ctrip,DC=com 36 | 37 | socket_time_out =14*24*60*60*1000 -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/TestMain.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.fs.ContentSummary; 7 | import org.apache.hadoop.fs.FileSystem; 8 | import org.apache.hadoop.fs.FsStatus; 9 | import org.apache.hadoop.fs.Path; 10 | import org.apache.hadoop.hdfs.DistributedFileSystem; 11 | import org.apache.hadoop.hdfs.protocol.DatanodeInfo; 12 | 13 | public class TestMain { 14 | 15 | public void test() throws IOException { 16 | Configuration conf = new Configuration(); 17 | conf.addResource("conf/hdfs/test-hdfs-client-conf.xml"); 18 | 19 | System.setProperty("HADOOP_USER_NAME", "hdfs"); 20 | DistributedFileSystem fs = (DistributedFileSystem) FileSystem.get(conf); 21 | 22 | DatanodeInfo[] dataNodeStatus = fs.getDataNodeStats(); 23 | for (DatanodeInfo dninfo : dataNodeStatus) { 24 | System.out.println(dninfo.getHostName() + ", Is Decommission:" 25 | + dninfo.isDecommissioned()); 26 | System.out.println(dninfo.getHostName() + ", Dump Data node:" 27 | + dninfo.dumpDatanode()); 28 | } 29 | System.out.println("Default block size:" + fs.getDefaultBlockSize()); 30 | ContentSummary contentSummary = fs.getContentSummary(new Path("/")); 31 | System.out.println("Content Summary:" + contentSummary); 32 | 33 | FsStatus fsstatus = fs.getStatus(); 34 | 35 | System.out.println(fsstatus.getCapacity()); 36 | } 37 | 38 | public static void main(String[] args) throws IOException { 39 | TestMain test = new TestMain(); 40 | test.test(); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/hive/TestHDFSHelper.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hive; 2 | 3 | import java.io.FileNotFoundException; 4 | import java.io.IOException; 5 | 6 | import org.apache.hadoop.fs.FileStatus; 7 | import org.apache.hadoop.fs.FileSystem; 8 | import org.apache.hadoop.fs.Path; 9 | import 
org.junit.Test; 10 | import org.springframework.context.ApplicationContext; 11 | import org.springframework.context.support.ClassPathXmlApplicationContext; 12 | 13 | import com.ctrip.di.hive.util.HDFSHelper; 14 | 15 | public class TestHDFSHelper { 16 | 17 | @Test 18 | public void testGetHDFS() { 19 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 20 | 21 | HDFSHelper hdfsHelper = context.getBean(HDFSHelper.class); 22 | 23 | FileSystem fs = hdfsHelper.getFileSystem(); 24 | 25 | try { 26 | FileStatus[] status = fs.listStatus(new Path("/user/hdfs")); 27 | for(FileStatus fileStatus : status) { 28 | System.out.println("File:" + fileStatus.getPath()); 29 | } 30 | System.out.println(status[0]); 31 | } catch (FileNotFoundException e) { 32 | e.printStackTrace(); 33 | } catch (IOException e) { 34 | e.printStackTrace(); 35 | } 36 | } 37 | 38 | 39 | //@Test 40 | public void testRMHDFS() throws IOException { 41 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 42 | 43 | HDFSHelper hdfsHelper = context.getBean(HDFSHelper.class); 44 | FileSystem fs = hdfsHelper.getFileSystem(); 45 | 46 | System.out.println(fs.mkdirs(new Path("/user/you/testxgliao"))); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/hdfs/HdfsDirSummary.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hdfs; 2 | 3 | /** 4 | * Hdfs directory summary data object 5 | * @author xgliao 6 | * 7 | */ 8 | public class HdfsDirSummary { 9 | private String user; 10 | private long length; 11 | private long fileCount; 12 | private long directoryCount; 13 | private long quota; 14 | private long spaceConsumed; 15 | private long spaceQuota; 16 | 17 | public String getUser() { 18 | return user; 19 | } 20 | 21 | public void setUser(String user) { 22 | this.user = user; 23 | } 24 | 25 | public long getLength() { 26 | return length; 27 | } 28 | 29 | public void setLength(long length) { 30 | this.length = length; 31 | } 32 | 33 | public long getFileCount() { 34 | return fileCount; 35 | } 36 | 37 | public void setFileCount(long fileCount) { 38 | this.fileCount = fileCount; 39 | } 40 | 41 | public long getDirectoryCount() { 42 | return directoryCount; 43 | } 44 | 45 | public void setDirectoryCount(long directoryCount) { 46 | this.directoryCount = directoryCount; 47 | } 48 | 49 | public long getQuota() { 50 | return quota; 51 | } 52 | 53 | public void setQuota(long quota) { 54 | this.quota = quota; 55 | } 56 | 57 | public long getSpaceConsumed() { 58 | return spaceConsumed; 59 | } 60 | 61 | public void setSpaceConsumed(long spaceConsumed) { 62 | this.spaceConsumed = spaceConsumed; 63 | } 64 | 65 | public long getSpaceQuota() { 66 | return spaceQuota; 67 | } 68 | 69 | public void setSpaceQuota(long spaceQuota) { 70 | this.spaceQuota = spaceQuota; 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /sparksql/src/sparksql/static/css/sparksql.css: -------------------------------------------------------------------------------- 1 | /* 2 | sparksql styles 3 | important: these should be namespaced 4 | example: 5 | let's say you are creating an app called "calculator"; you should prefix all your styles with your application's name like so 6 | 7 | .calculator img { 8 | border: 1px solid #000; 9 | } 10 | .calculator p { 11 | margin: 10px 0px; 12 | } 13 | etc... 
14 | 15 | other notes: 16 | * don't use ids - there may be more than one of your apps open; use classes! 17 | * the toolbar element is absolutely positioned and 100% wide (and therefor 0px high); 18 | any elements inside it should also be absolutely positioned 19 | */ 20 | 21 | .sparksql img.sparksql_icon { 22 | width: 55px; 23 | height: 55px; 24 | position: absolute; 25 | top: 27px; 26 | left: 3px; 27 | } 28 | 29 | #dangerMessage{ 30 | width:99.6%; 31 | } 32 | #sql_context{ 33 | overflow-y: visible; 34 | height: 160px; 35 | width: 99%; 36 | } 37 | 38 | #sparksql-execute { 39 | margin-top: 100px; 40 | margin-right: -120px; 41 | width: 80px; 42 | } 43 | 44 | .th_title{ 45 | background-color: #ccc; 46 | } 47 | 48 | .single{ 49 | background: rgba(20, 199, 185, 0.21) ; 50 | } 51 | 52 | .double{ 53 | background: rgba(177, 240, 232, 0.36); 54 | } 55 | 56 | .loading-mask{background-color:#000;opacity:0.30;filter:alpha(opacity=30);} 57 | .loading{display:inline-block;*display:inline;*zoom:1;top:50%;left:50%;padding:20px 40px 16px;background-color:#fff;text-align:center;color:#666;font-size:12px;border:1px solid #888;border-radius:2px;box-shadow:2px 2px 0 #999;} 58 | .loading > img{margin:0 10px 0 0;position:relative;top:-2px;} -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/jmx/YarnJmxBean.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.common.jmx; 2 | 3 | public class YarnJmxBean { 4 | private int appsSumitted; 5 | private int appsPending; 6 | private int appsRunning; 7 | private int appsCompleted; 8 | private int appsFailed; 9 | private int appsKilled; 10 | 11 | private String liveNodeManagers; 12 | 13 | public int getAppsSumitted() { 14 | return appsSumitted; 15 | } 16 | 17 | public void setAppsSumitted(int appsSumitted) { 18 | this.appsSumitted = appsSumitted; 19 | } 20 | 21 | public int getAppsPending() { 22 | return appsPending; 23 | } 24 | 25 | public void setAppsPending(int appsPending) { 26 | this.appsPending = appsPending; 27 | } 28 | 29 | public int getAppsRunning() { 30 | return appsRunning; 31 | } 32 | 33 | public void setAppsRunning(int appsRunning) { 34 | this.appsRunning = appsRunning; 35 | } 36 | 37 | public int getAppsCompleted() { 38 | return appsCompleted; 39 | } 40 | 41 | public void setAppsCompleted(int appsCompleted) { 42 | this.appsCompleted = appsCompleted; 43 | } 44 | 45 | public int getAppsFailed() { 46 | return appsFailed; 47 | } 48 | 49 | public void setAppsFailed(int appsFailed) { 50 | this.appsFailed = appsFailed; 51 | } 52 | 53 | public int getAppsKilled() { 54 | return appsKilled; 55 | } 56 | 57 | public void setAppsKilled(int appsKilled) { 58 | this.appsKilled = appsKilled; 59 | } 60 | 61 | public String getLiveNodeManagers() { 62 | return liveNodeManagers; 63 | } 64 | 65 | public void setLiveNodeManagers(String liveNodeManagers) { 66 | this.liveNodeManagers = liveNodeManagers; 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/hive/TestHiveHelper.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hive; 2 | 3 | import java.util.List; 4 | 5 | import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; 6 | import org.apache.hadoop.hive.metastore.api.MetaException; 7 | import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; 8 | import 
org.apache.hadoop.hive.metastore.api.Partition; 9 | import org.apache.thrift.TException; 10 | import org.junit.Test; 11 | import org.springframework.context.ApplicationContext; 12 | import org.springframework.context.support.ClassPathXmlApplicationContext; 13 | 14 | import com.ctrip.di.hive.util.HiveHelper; 15 | 16 | public class TestHiveHelper { 17 | 18 | @Test 19 | public void testHiveHelper() { 20 | 21 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 22 | 23 | HiveHelper hiveHelper = context.getBean(HiveHelper.class); 24 | 25 | HiveMetaStoreClient hiveClient = hiveHelper.getHiveMetaStoreClient(); 26 | 27 | List<Partition> partitionList = null; 28 | 29 | try { 30 | partitionList = hiveClient.listPartitionsByFilter("test", "test6", "pt=\"2014-10-03\" and hour=\"01\"", Short.MAX_VALUE); 31 | } catch (NoSuchObjectException e) { 32 | System.out.println("no table named:" + e); 33 | } catch (MetaException e) { 34 | System.out.println("hive metastore exception: " + e); 35 | } catch (TException e) { 36 | System.out.println(e); 37 | } 38 | if (partitionList == null) return; // lookup failed; nothing to print 39 | for (Partition p : partitionList) { 40 | System.out.println(p.getValues().get(0)); 41 | System.out.println(p.getSd().getLocation()); 42 | System.out.println(p.getValues()); 43 | } 44 | } 45 | } 46 |
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/script/di.sql: --------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS `ctrip_yarn_jobs`; 2 | CREATE TABLE `ctrip_yarn_jobs` ( 3 | `id` int(11) NOT NULL AUTO_INCREMENT, 4 | `start_time` bigint(20) NOT NULL, 5 | `finish_time` bigint(20) NOT NULL, 6 | `job_id` varchar(256) DEFAULT NULL, 7 | `queue` varchar(128) DEFAULT NULL, 8 | `user` varchar(128) DEFAULT NULL, 9 | `status` varchar(64) DEFAULT NULL, 10 | `maps_total` int(11) NOT NULL, 11 | `reduces_total` int(11) NOT NULL, 12 | `date_str` varchar(64) DEFAULT NULL, 13 | PRIMARY KEY (`id`), 14 | KEY `user_date_index` (`user`,`date_str`) 15 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 16 | 17 | DROP TABLE IF EXISTS `spark_jobs`; 18 | CREATE TABLE `spark_jobs` ( 19 | `id` int(11) NOT NULL AUTO_INCREMENT, 20 | `start_time` bigint(20) NOT NULL, 21 | `finish_time` bigint(20) NOT NULL, 22 | `sql` varchar(2048) DEFAULT NULL, 23 | `user` varchar(128) DEFAULT NULL, 24 | `status` varchar(64) DEFAULT NULL, 25 | PRIMARY KEY (`id`), 26 | KEY `user_index` (`user`) 27 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 28 | 29 | DROP TABLE IF EXISTS `ctrip_hive_jobs`; 30 | CREATE TABLE `ctrip_hive_jobs` ( 31 | `dbname` varchar(100) NOT NULL DEFAULT '', 32 | `tablename` varchar(100) NOT NULL DEFAULT '', 33 | `pt_format` varchar(200) DEFAULT NULL, 34 | `keepdays` int(11) DEFAULT '0', 35 | `checkrate` float DEFAULT '0', 36 | `username` varchar(100) DEFAULT NULL, 37 | `email` varchar(100) DEFAULT NULL, 38 | `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 39 | `modified_time` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', 40 | PRIMARY KEY (`dbname`,`tablename`) 41 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-------------------------------------------------------------------------------- /monitor/src/monitor/static/js/fnSetFilteringDelay.js: --------------------------------------------------------------------------------
1 | /** 2 | * Enables a filtering delay to keep the browser more responsive while 3 | * searching for longer keywords.
4 | * 5 | * This can be particularly useful when working with server-side processing, 6 | * where you wouldn't typically want an Ajax request to be made with every key 7 | * press the user makes when searching the table. 8 | * 9 | * @name fnSetFilteringDelay 10 | * @summary Add a key debounce delay to the global filtering input of a table 11 | * @author [Zygimantas Berziunas](http://www.zygimantas.com/), 12 | * [Allan Jardine](http://www.sprymedia.co.uk/) and _vex_ 13 | * 14 | * @example 15 | * $(document).ready(function() { 16 | * $('.dataTable').dataTable().fnSetFilteringDelay(); 17 | * } ); 18 | */ 19 | 20 | jQuery.fn.dataTableExt.oApi.fnSetFilteringDelay = function ( oSettings, iDelay ) { 21 | var _that = this; 22 | 23 | if ( iDelay === undefined ) { 24 | iDelay = 250; 25 | } 26 | 27 | this.each( function ( i ) { 28 | $.fn.dataTableExt.iApiIndex = i; 29 | var 30 | $this = this, 31 | oTimerId = null, 32 | sPreviousSearch = null, 33 | anControl = $( 'input', _that.fnSettings().aanFeatures.f ); 34 | 35 | anControl.unbind( 'keyup search input' ).bind( 'keyup search input', function() { 36 | var $$this = $this; 37 | 38 | if (sPreviousSearch === null || sPreviousSearch != anControl.val()) { 39 | window.clearTimeout(oTimerId); 40 | sPreviousSearch = anControl.val(); 41 | oTimerId = window.setTimeout(function() { 42 | $.fn.dataTableExt.iApiIndex = i; 43 | _that.fnFilter( anControl.val() ); 44 | }, iDelay); 45 | } 46 | }); 47 | 48 | return this; 49 | } ); 50 | return this; 51 | }; 52 |
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/metric.xsd: --------------------------------------------------------------------------------
1 | <!-- XML Schema for the metric configuration; the original markup could not be recovered from this dump. -->
-------------------------------------------------------------------------------- /monitor/setup.py: --------------------------------------------------------------------------------
1 | # Licensed to Cloudera, Inc. under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. Cloudera, Inc. licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License.
16 | from setuptools import setup, find_packages 17 | import os 18 | 19 | def expand_package_data(src_dirs, strip=""): 20 | ret = [] 21 | for src_dir in src_dirs: 22 | for path, dnames, fnames in os.walk(src_dir): 23 | for fname in fnames: 24 | ret.append(os.path.join(path, fname).replace(strip, "")) 25 | return ret 26 | 27 | os.chdir(os.path.dirname(os.path.abspath(__file__))) 28 | setup( 29 | name = "monitor", 30 | version = "0.1", 31 | url = 'TODO', 32 | description = 'TODO', 33 | author = 'TODO', 34 | packages = find_packages('src'), 35 | package_dir = {'': 'src'}, 36 | install_requires = ['setuptools', 'desktop'], 37 | entry_points = { 'desktop.sdk.application': 'monitor=monitor' }, 38 | zip_safe = False, 39 | package_data = { 40 | # Include static resources. Package_data doesn't 41 | # deal well with directory globs, so we enumerate 42 | # the files manually. 43 | 'monitor': expand_package_data( 44 | ["src/monitor/templates", "src/monitor/static"], 45 | "src/monitor/") 46 | } 47 | ) 48 | -------------------------------------------------------------------------------- /sparksql/setup.py: -------------------------------------------------------------------------------- 1 | # Licensed to Cloudera, Inc. under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. Cloudera, Inc. licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | from setuptools import setup, find_packages 17 | import os 18 | 19 | def expand_package_data(src_dirs, strip=""): 20 | ret = [] 21 | for src_dir in src_dirs: 22 | for path, dnames, fnames in os.walk(src_dir): 23 | for fname in fnames: 24 | ret.append(os.path.join(path, fname).replace(strip, "")) 25 | return ret 26 | 27 | os.chdir(os.path.dirname(os.path.abspath(__file__))) 28 | setup( 29 | name = "sparksql", 30 | version = "0.1", 31 | url = 'TODO', 32 | description = 'TODO', 33 | author = 'TODO', 34 | packages = find_packages('src'), 35 | package_dir = {'': 'src'}, 36 | install_requires = ['setuptools', 'desktop'], 37 | entry_points = { 'desktop.sdk.application': 'sparksql=sparksql' }, 38 | zip_safe = False, 39 | package_data = { 40 | # Include static resources. Package_data doesn't 41 | # deal well with directory globs, so we enumerate 42 | # the files manually. 
43 | 'sparksql': expand_package_data( 44 | ["src/sparksql/templates", "src/sparksql/static"], 45 | "src/sparksql/") 46 | } 47 | ) 48 |
-------------------------------------------------------------------------------- /di-data-service/.project: --------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <projectDescription> 3 | <name>di-data-service</name> 4 | <comment></comment> 5 | <projects></projects> 6 | <buildSpec> 7 | <buildCommand><name>org.eclipse.wst.jsdt.core.javascriptValidator</name><arguments></arguments></buildCommand> 8 | <buildCommand><name>org.eclipse.jdt.core.javabuilder</name><arguments></arguments></buildCommand> 9 | <buildCommand><name>org.eclipse.wst.common.project.facet.core.builder</name><arguments></arguments></buildCommand> 10 | <buildCommand><name>org.eclipse.wst.validation.validationbuilder</name><arguments></arguments></buildCommand> 11 | <buildCommand><name>com.genuitec.eclipse.j2eedt.core.DeploymentDescriptorValidator</name><arguments></arguments></buildCommand> 12 | <buildCommand><name>com.genuitec.eclipse.ast.deploy.core.DeploymentBuilder</name><arguments></arguments></buildCommand> 13 | <buildCommand><name>com.genuitec.eclipse.springframework.springbuilder</name><arguments></arguments></buildCommand> 14 | <buildCommand><name>org.eclipse.m2e.core.maven2Builder</name><arguments></arguments></buildCommand> 15 | </buildSpec> 16 | <natures> 17 | <nature>com.genuitec.eclipse.springframework.springnature</nature> 18 | <nature>org.eclipse.m2e.core.maven2Nature</nature> 19 | <nature>org.eclipse.jem.workbench.JavaEMFNature</nature> 20 | <nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature> 21 | <nature>org.eclipse.wst.common.project.facet.core.nature</nature> 22 | <nature>org.eclipse.jdt.core.javanature</nature> 23 | <nature>org.eclipse.wst.jsdt.core.jsNature</nature> 24 | </natures> 25 | </projectDescription>
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/authenticate/LdapAuthenticate.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.common.authenticate; 2 | 3 | import java.util.Hashtable; 4 | 5 | import javax.naming.Context; 6 | import javax.naming.NamingException; 7 | import javax.naming.ldap.InitialLdapContext; 8 | import javax.naming.ldap.LdapContext; 9 | 10 | import org.apache.commons.logging.Log; 11 | import org.apache.commons.logging.LogFactory; 12 | import org.springframework.beans.factory.annotation.Value; 13 | import org.springframework.stereotype.Service; 14 | 15 | /** 16 | * Ldap authenticate. 17 | * Currently there are two types of users in the DI portal: one is an LDAP user, 18 | * and the other is a Hadoop user.
19 | * @author xgliao 20 | * 21 | */ 22 | @Service 23 | public class LdapAuthenticate implements IAuthenticate { 24 | private static Log logger = LogFactory 25 | .getLog(LdapAuthenticate.class); 26 | 27 | @Value("${LDAP_URL}") 28 | private String ldapUrl; 29 | 30 | @SuppressWarnings({ "rawtypes", "unchecked" }) 31 | public LdapContext connectLdap(String ldapAccount, String ldapPwd, 32 | String range) throws NamingException { 33 | String ldapFactory = "com.sun.jndi.ldap.LdapCtxFactory"; 34 | Hashtable env = new Hashtable(); 35 | env.put(Context.INITIAL_CONTEXT_FACTORY, ldapFactory); 36 | env.put(Context.PROVIDER_URL, ldapUrl); 37 | env.put(Context.SECURITY_AUTHENTICATION, "simple"); 38 | env.put(Context.SECURITY_PRINCIPAL, range + "\\" + ldapAccount); 39 | env.put(Context.SECURITY_CREDENTIALS, ldapPwd); 40 | env.put("java.naming.referral", "follow"); 41 | LdapContext ctxTDS = new InitialLdapContext(env, null); 42 | return ctxTDS; 43 | } 44 | 45 | public boolean authenticate(String username, String password) { 46 | String range = "cn1"; 47 | try { 48 | connectLdap(username, password, range); 49 | return true; 50 | } catch (Exception e) { 51 | logger.warn("Auth error:", e); 52 | return false; 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/hive/HiveJobsService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.dao.hive; 2 | 3 | import java.util.List; 4 | 5 | import org.apache.commons.logging.Log; 6 | import org.apache.commons.logging.LogFactory; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.stereotype.Service; 9 | import org.springframework.transaction.annotation.Transactional; 10 | 11 | @Service 12 | public class HiveJobsService { 13 | private static final Log logger = LogFactory.getLog(HiveJobsService.class); 14 | 15 | @Autowired 16 | private HiveJobsMapper hiveJobsMapper; 17 | 18 | public List getAllHiveJobs() { 19 | List result = null; 20 | try { 21 | result = hiveJobsMapper.getAllHiveJobs(); 22 | } catch (Exception e) { 23 | // Ignore sql exception 24 | logger.error("get all hive job info failed", e); 25 | } 26 | return result; 27 | } 28 | 29 | public HiveJobsDo getHiveJob(String dbName, String tableName) { 30 | HiveJobsDo result = null; 31 | try { 32 | result = hiveJobsMapper.getHiveJobs(dbName, tableName); 33 | } catch (Exception e) { 34 | // Ignore sql exception 35 | logger.error("get hive job info failed", e); 36 | } 37 | 38 | return result; 39 | } 40 | 41 | /** 42 | * add the hive jobs, update if it exists in database 43 | * 44 | * @param hiveJobsDao 45 | */ 46 | @Transactional 47 | public void addHiveJobs(HiveJobsDo hiveJobsDao) { 48 | logger.info("begin to add hive job : " + hiveJobsDao); 49 | 50 | try { 51 | HiveJobsDo hjd = hiveJobsMapper.getHiveJobs( 52 | hiveJobsDao.getDbname(), hiveJobsDao.getTablename()); 53 | if (null == hjd) { 54 | hiveJobsMapper.insertHiveJobs(hiveJobsDao); 55 | } else { 56 | hiveJobsDao.setCreate_time(hjd.getCreate_time()); 57 | hiveJobsMapper.updateHiveJobs(hiveJobsDao); 58 | } 59 | 60 | } catch (Exception e) { 61 | logger.error("add hive job info failed, rollback...", e); 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/metric/portal_hadoop_config_test.xml: 
--------------------------------------------------------------------------------
1 | <!-- Portal metric display-name configuration; the XML markup could not be recovered from this dump. 2 | The surviving metric labels, grouped as in the original file, were: 3 | Region Server平均Compact所需时间; Region Server平均写Size; RPC在Queue里的时间(毫秒); RPC平均请求时长(毫秒); RPC平均写时长(毫秒); RPC平均读时长(毫秒) 4 | CPU报告; RPC平均请求时长(毫秒); HBaseMaster请求次数; Region Server Block Cache命中率; Region Server Block Cache命中率 5 | 硬盘空闲空间; 内存空闲空间 6 | App注册平均时间; Task失败; 启动Task平均时间; 分配Task平均时间 7 | -->
-------------------------------------------------------------------------------- /di-data-service/src/main/resources/log4j.properties: --------------------------------------------------------------------------------
1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 |
26 | # 27 | log4j.rootLogger=INFO 28 | log4j.logger.com.ctrip.di=INFO, STDOUT, di 29 | 30 | log4j.appender.di=org.apache.log4j.DailyRollingFileAppender 31 | log4j.appender.di.File=${catalina.home}/logs/di-service.log 32 | log4j.appender.di.Append=true 33 | log4j.appender.di.layout=org.apache.log4j.PatternLayout 34 | log4j.appender.di.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - %m%n 35 | log4j.appender.di.RollingPolicy.MaxHistory=720 36 | log4j.appender.di.DatePattern='.'yyyy-MM-dd 37 | 38 | log4j.appender.STDOUT=org.apache.log4j.ConsoleAppender 39 | log4j.appender.STDOUT.layout=org.apache.log4j.PatternLayout 40 | log4j.appender.STDOUT.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - %m%n 41 | 42 | 43 | -------------------------------------------------------------------------------- /di-data-service/src/main/resources/conf/mybatis/hiveJobs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 22 | 23 | 24 | 29 | 30 | 35 | 36 | 37 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /monitor/src/monitor/static/js/jquery.livesearch.min.js: -------------------------------------------------------------------------------- 1 | jQuery.fn.liveSearch=function(conf){var config=jQuery.extend({url:'/search-results.php?q=',id:'jquery-live-search',duration:400,typeDelay:200,loadingClass:'loading',onSlideUp:function(){},uptadePosition:false},conf);var liveSearch=jQuery('#'+config.id);if(!liveSearch.length){liveSearch=jQuery('
').appendTo(document.body).hide().slideUp(0);jQuery(document.body).click(function(event){var clicked=jQuery(event.target);if(!(clicked.is('#'+config.id)||clicked.parents('#'+config.id).length||clicked.is('input'))){liveSearch.slideUp(config.duration,function(){config.onSlideUp()})}})}return this.each(function(){var input=jQuery(this).attr('autocomplete','off');var liveSearchPaddingBorderHoriz=parseInt(liveSearch.css('paddingLeft'),10)+parseInt(liveSearch.css('paddingRight'),10)+parseInt(liveSearch.css('borderLeftWidth'),10)+parseInt(liveSearch.css('borderRightWidth'),10);var repositionLiveSearch=function(){var tmpOffset=input.offset();var inputDim={left:tmpOffset.left,top:tmpOffset.top,width:input.outerWidth(),height:input.outerHeight()};inputDim.topPos=inputDim.top+inputDim.height;inputDim.totalWidth=inputDim.width-liveSearchPaddingBorderHoriz;liveSearch.css({position:'absolute',left:inputDim.left+'px',top:inputDim.topPos+'px',width:inputDim.totalWidth+'px'})};var showLiveSearch=function(){repositionLiveSearch();$(window).unbind('resize',repositionLiveSearch);$(window).bind('resize',repositionLiveSearch);liveSearch.slideDown(config.duration)};var hideLiveSearch=function(){liveSearch.slideUp(config.duration,function(){config.onSlideUp()})};input.focus(function(){if(this.value!==''){if(liveSearch.html()==''){this.lastValue='';input.keyup()}else{setTimeout(showLiveSearch,1)}}}).keyup(function(){if(this.value!=this.lastValue){input.addClass(config.loadingClass);var q=this.value;if(this.timer){clearTimeout(this.timer)}this.timer=setTimeout(function(){jQuery.get(config.url+q,function(data){input.removeClass(config.loadingClass);if(data.length&&q.length){liveSearch.html(data);showLiveSearch()}else{hideLiveSearch()}})},config.typeDelay);this.lastValue=this.value}})})}; -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/hive/TestHiveJob.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hive; 2 | 3 | import java.util.Date; 4 | 5 | import net.sf.json.JSONObject; 6 | 7 | import org.junit.Test; 8 | import org.springframework.context.ApplicationContext; 9 | import org.springframework.context.support.ClassPathXmlApplicationContext; 10 | 11 | import com.ctrip.di.dao.hive.HiveJobsDo; 12 | import com.ctrip.di.dao.hive.HiveJobsService; 13 | import com.ctrip.di.hive.HiveCheckService; 14 | import com.ctrip.di.hive.HivePartitionCleanService; 15 | 16 | public class TestHiveJob { 17 | 18 | // @Test 19 | public void testHiveCleanJob() { 20 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 21 | 22 | HiveJobsService hpcj = context.getBean(HiveJobsService.class); 23 | 24 | HiveJobsDo hiveJobsDo = hpcj.getHiveJob("test", "test3"); 25 | 26 | JSONObject json = JSONObject.fromObject(hiveJobsDo); 27 | 28 | System.out.println(json.toString()); 29 | 30 | System.out.println(hiveJobsDo.toString()); 31 | } 32 | 33 | @Test 34 | public void testAddHiveJob() { 35 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 36 | 37 | HiveJobsService hpcj = context.getBean(HiveJobsService.class); 38 | 39 | HiveJobsDo hiveJobsDo = new HiveJobsDo(); 40 | hiveJobsDo.setDbname("test"); 41 | hiveJobsDo.setTablename("test5"); 42 | hiveJobsDo.setPt_format("pt=${yyyy-MM-dd}"); 43 | hiveJobsDo.setKeepdays(20); 44 | hiveJobsDo.setCheckrate(0.3); 45 | hiveJobsDo.setUsername("jianguo"); 46 | hiveJobsDo.setEmail("jianguo@ctrip.com"); 47 | long time 
= System.currentTimeMillis(); 48 | hiveJobsDo.setCreate_time(new Date(time)); 49 | hiveJobsDo.setModified_time(new Date(time)); 50 | 51 | hpcj.addHiveJobs(hiveJobsDo); 52 | } 53 | 54 | // @Test 55 | public void testCheckHiveJob() { 56 | ApplicationContext context = new ClassPathXmlApplicationContext("rest-servlet.xml"); 57 | 58 | HiveCheckService hcs = context.getBean(HiveCheckService.class); 59 | 60 | hcs.checkHiveJob("test", "test2"); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/controller/HdfsController.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.controller; 2 | 3 | import java.util.List; 4 | 5 | import javax.servlet.http.HttpServletRequest; 6 | import javax.servlet.http.HttpServletResponse; 7 | 8 | import net.sf.json.JSONArray; 9 | import net.sf.json.JSONObject; 10 | 11 | import org.apache.commons.logging.Log; 12 | import org.apache.commons.logging.LogFactory; 13 | import org.springframework.beans.factory.annotation.Autowired; 14 | import org.springframework.stereotype.Controller; 15 | import org.springframework.web.bind.annotation.RequestMapping; 16 | 17 | import com.ctrip.di.common.jmx.HdfsJmxService; 18 | import com.ctrip.di.common.util.PrintWriterUtil; 19 | import com.ctrip.di.hdfs.HdfsDirSummary; 20 | import com.ctrip.di.hdfs.HdfsFileSummaryService; 21 | /** 22 | * APIs to get information from hdfs cluster 23 | * @author xgliao 24 | * 25 | */ 26 | @Controller 27 | @RequestMapping("/hdfs") 28 | public class HdfsController { 29 | private static Log logger = LogFactory.getLog(HdfsController.class); 30 | 31 | @Autowired 32 | private HdfsFileSummaryService fileSummaryService; 33 | 34 | @Autowired 35 | private HdfsJmxService hadoopJmxService; 36 | 37 | @RequestMapping("/get/userfileinfo") 38 | public void getUserFileInfo(HttpServletRequest request, 39 | HttpServletResponse response) { 40 | List hdfsSummaryList = fileSummaryService 41 | .getContentSummaryList(); 42 | 43 | JSONArray jsonArray = JSONArray.fromObject(hdfsSummaryList); 44 | 45 | PrintWriterUtil.writeJson(request, response, jsonArray.toString()); 46 | } 47 | 48 | @RequestMapping("/get/hdfsusageinfo") 49 | public void getHdfsClusterInfo(HttpServletRequest request, 50 | HttpServletResponse response) { 51 | try { 52 | JSONObject jsonObject = hadoopJmxService.getJmxBean(); 53 | PrintWriterUtil.writeJson(request, response, jsonObject.toString()); 54 | } catch (Exception e) { 55 | logger.error("Jmx Service Exception", e); 56 | PrintWriterUtil.writeError(request, response, 57 | "Get hdfs usage information error " + e.getMessage()); 58 | } 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /monitor/src/monitor/static/js/date.js: -------------------------------------------------------------------------------- 1 | 2 | function getTimeStr(year, month,day ,hour,mi){ 3 | return month +"%2F"+ day +"%2F"+year+"+"+hour+"%3A"+mi; 4 | } 5 | 6 | function getTimeStrNormal(year, month,day ,hour,mi){ 7 | return month +"/"+ day +"/"+year+"+"+hour+":"+mi; 8 | } 9 | 10 | Date.prototype.format = function (format) { 11 | var o = { 12 | "M+": this.getMonth() + 1, 13 | "d+": this.getDate(), 14 | "h+": this.getHours(), 15 | "m+": this.getMinutes(), 16 | "s+": this.getSeconds(), 17 | "q+": Math.floor((this.getMonth() + 3) / 3), 18 | "S": this.getMilliseconds() 19 | } 20 | if (/(y+)/.test(format)) { 21 | format = format.replace(RegExp.$1, 
(this.getFullYear() + "").substr(4 - RegExp.$1.length)); 22 | } 23 | for (var k in o) { 24 | if (new RegExp("(" + k + ")").test(format)) { 25 | format = format.replace(RegExp.$1, RegExp.$1.length == 1 ? o[k] : ("00" + o[k]).substr(("" + o[k]).length)); 26 | } 27 | } 28 | 29 | return format; 30 | } 31 | 32 | function getSmpFormatDate(date, isFull) { 33 | var pattern = ""; 34 | if (isFull == true || isFull == undefined) { 35 | pattern = "yyyy-MM-dd hh:mm:ss"; 36 | } else { 37 | pattern = "yyyy-MM-dd"; 38 | } 39 | return getFormatDate(date, pattern); 40 | } 41 | 42 | function getSmpFormatNowDate(isFull) { 43 | return getSmpFormatDate(new Date(), isFull); 44 | } 45 | 46 | function getSmpFormatDateByLong(l, isFull) { 47 | 48 | return getSmpFormatDate(new Date(l*1000), isFull); 49 | } 50 | 51 | function getFormatDateByLong(l, pattern) { 52 | return getFormatDate(new Date(l), pattern); 53 | } 54 | 55 | function getFormatDate(date, pattern) { 56 | if (date == undefined) { 57 | date = new Date(); 58 | } 59 | if (pattern == undefined) { 60 | pattern = "yyyy-MM-dd hh:mm:ss"; 61 | } 62 | return date.format(pattern); 63 | } 64 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/ganglia/GangliaHttpParser.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.ganglia; 2 | 3 | import java.io.IOException; 4 | import java.net.MalformedURLException; 5 | import java.net.URL; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | 9 | import org.htmlparser.Node; 10 | import org.htmlparser.NodeFilter; 11 | import org.htmlparser.Parser; 12 | import org.htmlparser.filters.AndFilter; 13 | import org.htmlparser.filters.HasAttributeFilter; 14 | import org.htmlparser.filters.TagNameFilter; 15 | import org.htmlparser.tags.OptionTag; 16 | import org.htmlparser.util.NodeList; 17 | import org.htmlparser.util.ParserException; 18 | import org.htmlparser.util.SimpleNodeIterator; 19 | import org.springframework.beans.factory.annotation.Value; 20 | import org.springframework.stereotype.Component; 21 | 22 | /** 23 | * Ganglia http Parser: get ganglia attribute by cluster name. 
24 | * Returns the matching metric names as a list. 25 | * @author xgliao 26 | * 27 | */ 28 | @Component 29 | public class GangliaHttpParser { 30 | @Value("${GANGLIA_METRIC_URL}") 31 | private String gangliaMetricUrl; 32 | @Value("${GANGLIA_METRIC_URL_ClUSTER_PATTERN}") 33 | private String clusterPattern; 34 | 35 | public List<String> getGangliaAttribute(String clusterName) 36 | throws ParserException, MalformedURLException, IOException { 37 | String url = gangliaMetricUrl.replaceAll(clusterPattern, clusterName); 38 | Parser parser = new Parser(new URL(url).openConnection()); 39 | NodeFilter nodeFilter = new AndFilter(new TagNameFilter("select"), 40 | new HasAttributeFilter("id", "metrics-picker")); 41 | NodeList nodeList = parser.extractAllNodesThatMatch(nodeFilter); 42 | SimpleNodeIterator iterator = nodeList.elements(); 43 | List<String> metricList = new ArrayList<String>(); 44 | while (iterator.hasMoreNodes()) { 45 | Node node = iterator.nextNode(); 46 | 47 | SimpleNodeIterator childIterator = node.getChildren().elements(); 48 | while (childIterator.hasMoreNodes()) { 49 | OptionTag children = (OptionTag) childIterator.nextNode(); 50 | metricList.add(children.getOptionText()); 51 | } 52 | } 53 | 54 | return metricList; 55 | 56 | } 57 | 58 | } 59 |
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/YarnJobsDo.java: --------------------------------------------------------------------------------
1 | package com.ctrip.di.dao; 2 | 3 | /** 4 | * Yarn jobs data object 5 | * @author xgliao 6 | * 7 | */ 8 | public class YarnJobsDo { 9 | private long id; 10 | private long startTime; 11 | private long finishTime; 12 | private String jobId; 13 | private String queue; 14 | private String user; 15 | private String status; 16 | private int mapsTotal; 17 | private int reducesTotal; 18 | private String dateStr; 19 | 20 | public String getDateStr() { 21 | return dateStr; 22 | } 23 | 24 | public void setDateStr(String dateStr) { 25 | this.dateStr = dateStr; 26 | } 27 | 28 | public long getId() { 29 | return id; 30 | } 31 | 32 | public void setId(long id) { 33 | this.id = id; 34 | } 35 | 36 | public long getStartTime() { 37 | return startTime; 38 | } 39 | 40 | public void setStartTime(long startTime) { 41 | this.startTime = startTime; 42 | } 43 | 44 | public long getFinishTime() { 45 | return finishTime; 46 | } 47 | 48 | public void setFinishTime(long finishTime) { 49 | this.finishTime = finishTime; 50 | } 51 | 52 | public String getJobId() { 53 | return jobId; 54 | } 55 | 56 | public void setJobId(String jobId) { 57 | this.jobId = jobId; 58 | } 59 | 60 | public String getQueue() { 61 | return queue; 62 | } 63 | 64 | public void setQueue(String queue) { 65 | this.queue = queue; 66 | } 67 | 68 | public String getUser() { 69 | return user; 70 | } 71 | 72 | public void setUser(String user) { 73 | this.user = user; 74 | } 75 | 76 | public String getStatus() { 77 | return status; 78 | } 79 | 80 | public void setStatus(String status) { 81 | this.status = status; 82 | } 83 | 84 | public int getMapsTotal() { 85 | return mapsTotal; 86 | } 87 | 88 | public void setMapsTotal(int mapsTotal) { 89 | this.mapsTotal = mapsTotal; 90 | } 91 | 92 | public int getReducesTotal() { 93 | return reducesTotal; 94 | } 95 | 96 | public void setReducesTotal(int reducesTotal) { 97 | this.reducesTotal = reducesTotal; 98 | } 99 | 100 | } 101 |
--------------------------------------------------------------------------------
/di-data-service/src/main/java/com/ctrip/di/common/jmx/YarnJmxService.java: -------------------------------------------------------------------------------- 1 | /*package com.ctrip.di.common.jmx; 2 | 3 | import javax.management.ObjectName; 4 | 5 | import net.sf.json.JSONObject; 6 | 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.stereotype.Service; 9 | 10 | @Service 11 | public class YarnJmxService extends AbstactJmxService { 12 | 13 | @Value("${YARN_JMX_SERVER}") 14 | private String yarnJmxServer; 15 | 16 | @Override 17 | protected String getJmxServer() { 18 | return yarnJmxServer; 19 | } 20 | 21 | @Override 22 | protected JSONObject getNewJmxBean() throws Exception { 23 | YarnJmxBean yarnJmxBean = new YarnJmxBean(); 24 | 25 | ObjectName on = new ObjectName( 26 | "Hadoop:service=ResourceManager,name=QueueMetrics,q0=root"); 27 | Object appsCompleted = mbsc.getAttribute(on, "AppsCompleted"); 28 | if (appsCompleted != null) { 29 | yarnJmxBean.setAppsCompleted((Integer)appsCompleted); 30 | } 31 | 32 | Object appsFailed = mbsc.getAttribute(on, "AppsFailed"); 33 | if (appsFailed != null) { 34 | yarnJmxBean.setAppsFailed((Integer)appsFailed); 35 | } 36 | 37 | Object appsKilled = mbsc.getAttribute(on, "AppsKilled"); 38 | if (appsKilled != null) { 39 | yarnJmxBean.setAppsKilled((Integer)appsKilled); 40 | } 41 | 42 | Object appsPending = mbsc.getAttribute(on, "AppsPending"); 43 | if (appsPending != null) { 44 | yarnJmxBean.setAppsPending((Integer)appsPending); 45 | } 46 | 47 | Object appsRunning = mbsc.getAttribute(on, "AppsRunning"); 48 | if (appsRunning != null) { 49 | yarnJmxBean.setAppsRunning((Integer)appsRunning); 50 | } 51 | 52 | Object appsSubmitted = mbsc.getAttribute(on, "AppsSubmitted"); 53 | if (appsSubmitted != null) { 54 | yarnJmxBean.setAppsSumitted((Integer)appsSubmitted); 55 | } 56 | 57 | ObjectName rmon = new ObjectName( 58 | "Hadoop:service=ResourceManager,name=RMNMInfo"); 59 | Object liveNodeManagers = mbsc.getAttribute(rmon, "LiveNodeManagers"); 60 | if (liveNodeManagers != null) { 61 | yarnJmxBean.setLiveNodeManagers((String)liveNodeManagers); 62 | } 63 | 64 | JSONObject json = JSONObject.fromObject(yarnJmxBean); 65 | return json; 66 | } 67 | 68 | } 69 | */ -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/jmx/AbstractJmxService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.common.jmx; 2 | 3 | import javax.annotation.PostConstruct; 4 | import javax.management.MBeanServerConnection; 5 | import javax.management.remote.JMXConnector; 6 | import javax.management.remote.JMXConnectorFactory; 7 | import javax.management.remote.JMXServiceURL; 8 | 9 | import net.sf.json.JSONObject; 10 | 11 | import org.apache.commons.logging.Log; 12 | import org.apache.commons.logging.LogFactory; 13 | import org.springframework.scheduling.annotation.Scheduled; 14 | 15 | /** 16 | * Abstract class for jmx service 17 | * @author xgliao 18 | * 19 | */ 20 | public abstract class AbstractJmxService { 21 | private static Log logger = LogFactory.getLog(AbstractJmxService.class); 22 | 23 | protected MBeanServerConnection mbsc; 24 | 25 | protected volatile JSONObject json = new JSONObject(); 26 | 27 | @PostConstruct 28 | public void initConnection() throws Exception { 29 | JMXServiceURL url = new JMXServiceURL(getJmxServer()); 30 | 31 | JMXConnector jmxc = JMXConnectorFactory.connect(url); 32 | 33 | mbsc = 
jmxc.getMBeanServerConnection(); 34 | } 35 | 36 | /** 37 | * provide jmx server url 38 | * @return string 39 | */ 40 | protected abstract String getJmxServer(); 41 | 42 | /** 43 | * Get newer jmx bean 44 | * @return jsonobject 45 | * @throws Exception 46 | */ 47 | protected abstract JSONObject getNewJmxBean() throws Exception; 48 | 49 | /** 50 | * Get a jmx bean 51 | * @return 52 | * @throws Exception 53 | */ 54 | public JSONObject getJmxBean() throws Exception { 55 | if (json.isEmpty()) { 56 | synchronized (json) { 57 | if (json.isEmpty()) { 58 | JSONObject newJson = getNewJmxBean(); 59 | json = newJson; 60 | } 61 | } 62 | } 63 | 64 | return json; 65 | } 66 | 67 | /** 68 | * Run every half hour 69 | * @throws Exception 70 | */ 71 | @Scheduled(fixedDelay = 1800 * 1000) 72 | public void run() throws Exception { 73 | logger.info("Start get new jmx bean while jmx server is " 74 | + getJmxServer()); 75 | JSONObject newJson = getNewJmxBean(); 76 | logger.info("End get new jmx bean while jmx server is " 77 | + getJmxServer()); 78 | json = newJson; 79 | } 80 | 81 | } 82 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/hive/HiveJobsDo.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.dao.hive; 2 | 3 | import java.util.Date; 4 | 5 | public class HiveJobsDo { 6 | 7 | private String dbname; 8 | private String tablename; 9 | private String pt_format; 10 | private int keepdays; 11 | private double checkrate; 12 | private String username; 13 | private String email; 14 | private Date create_time; 15 | private Date modified_time; 16 | 17 | public String getDbname() { 18 | return dbname; 19 | } 20 | 21 | public void setDbname(String dbname) { 22 | this.dbname = dbname; 23 | } 24 | 25 | public String getTablename() { 26 | return tablename; 27 | } 28 | 29 | public void setTablename(String tablename) { 30 | this.tablename = tablename; 31 | } 32 | 33 | public String getPt_format() { 34 | return pt_format; 35 | } 36 | 37 | public void setPt_format(String pt_format) { 38 | this.pt_format = pt_format; 39 | } 40 | 41 | public int getKeepdays() { 42 | return keepdays; 43 | } 44 | 45 | public void setKeepdays(int keepdays) { 46 | this.keepdays = keepdays; 47 | } 48 | 49 | public double getCheckrate() { 50 | return checkrate; 51 | } 52 | 53 | public void setCheckrate(double d) { 54 | this.checkrate = d; 55 | } 56 | 57 | public String getUsername() { 58 | return username; 59 | } 60 | 61 | public void setUsername(String username) { 62 | this.username = username; 63 | } 64 | 65 | public String getEmail() { 66 | return email; 67 | } 68 | 69 | public void setEmail(String email) { 70 | this.email = email; 71 | } 72 | 73 | public Date getCreate_time() { 74 | return create_time; 75 | } 76 | 77 | public void setCreate_time(Date create_time) { 78 | this.create_time = create_time; 79 | } 80 | 81 | public Date getModified_time() { 82 | return modified_time; 83 | } 84 | 85 | public void setModified_time(Date modified_time) { 86 | this.modified_time = modified_time; 87 | } 88 | 89 | @Override 90 | public String toString() { 91 | return "dbname:" + dbname + " tablename:" + tablename + " pt:" 92 | + pt_format + " days:" + keepdays + " checkrate:" + checkrate 93 | + " username:" + username + " email:" + email + " createtime:" 94 | + create_time + " modifiedtime:" + modified_time; 95 | } 96 | } 97 | -------------------------------------------------------------------------------- 
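The AbstractJmxService above caches one JSONObject snapshot per subclass: getJmxBean() fills it lazily behind a double-checked lock on the volatile json field (worst case, two first callers both fetch), and the @Scheduled method swaps in a fresh snapshot every 30 minutes. A minimal subclass sketch (editor's illustration; the property name and the MBean chosen are placeholders, not part of this repo):

package com.ctrip.di.common.jmx;   // hypothetical file

import javax.management.ObjectName;

import net.sf.json.JSONObject;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class RuntimeJmxService extends AbstractJmxService {

    // Hypothetical property, named by analogy with HDFS_JMX_SERVER/YARN_JMX_SERVER.
    @Value("${RUNTIME_JMX_SERVER}")
    private String runtimeJmxServer;

    @Override
    protected String getJmxServer() {
        // e.g. "service:jmx:rmi:///jndi/rmi://host:8026/jmxrmi"
        return runtimeJmxServer;
    }

    @Override
    protected JSONObject getNewJmxBean() throws Exception {
        // mbsc is the inherited MBeanServerConnection opened in initConnection()
        ObjectName on = new ObjectName("java.lang:type=Runtime");
        JSONObject json = new JSONObject();
        json.put("uptime", mbsc.getAttribute(on, "Uptime"));
        json.put("vmName", mbsc.getAttribute(on, "VmName"));
        return json;
    }
}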
/di-data-service/src/main/java/com/ctrip/di/controller/MetricsController.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.controller; 2 | 3 | import java.util.List; 4 | 5 | import javax.servlet.http.HttpServletRequest; 6 | import javax.servlet.http.HttpServletResponse; 7 | 8 | import net.sf.json.JSONObject; 9 | 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.stereotype.Controller; 12 | import org.springframework.web.bind.annotation.RequestMapping; 13 | 14 | import com.ctrip.di.common.MetricConfigParser; 15 | import com.ctrip.di.common.util.PrintWriterUtil; 16 | import com.ctrip.di.ganglia.GangliaMetricService; 17 | 18 | /** 19 | * API to get metric config information 20 | * @author xgliao 21 | * 22 | */ 23 | 24 | @Controller 25 | @RequestMapping("/metric") 26 | public class MetricsController { 27 | 28 | @Autowired 29 | private MetricConfigParser configParser; 30 | 31 | @Autowired 32 | private GangliaMetricService metricService; 33 | 34 | @RequestMapping("/getmetriclist") 35 | public void getMetricList(HttpServletRequest request, 36 | HttpServletResponse response) { 37 | response.setContentType("application/json"); 38 | String clusterName = request.getParameter("clustername"); 39 | if (clusterName == null) { 40 | PrintWriterUtil.writeError(request, response, 41 | "Cluster Name Can Not be null"); 42 | return; 43 | } 44 | 45 | List metricList = metricService 46 | .getVIMetricsByCluster(clusterName); 47 | 48 | JSONObject json = new JSONObject(); 49 | json.put("metrics", metricList); 50 | PrintWriterUtil.writeJson(request, response, json.toString()); 51 | } 52 | 53 | @RequestMapping("/getallmetriclist") 54 | public void getAllMetricList(HttpServletRequest request, 55 | HttpServletResponse response) { 56 | response.setContentType("application/json"); 57 | String clusterName = request.getParameter("clustername"); 58 | if (clusterName == null) { 59 | PrintWriterUtil.writeError(request, response, 60 | "Cluster Name Can Not be null"); 61 | return; 62 | } 63 | 64 | List metricList = metricService 65 | .getMetricsByCluster(clusterName); 66 | JSONObject json = new JSONObject(); 67 | json.put("metrics", metricList); 68 | PrintWriterUtil.writeJson(request, response, json.toString()); 69 | } 70 | 71 | } 72 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/YarnJobService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.dao; 2 | 3 | import java.util.List; 4 | 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.stereotype.Service; 7 | import org.springframework.transaction.annotation.Transactional; 8 | 9 | /** 10 | * Yarn job service: crawls the jobs from the yarn cluster and inserts the job list into mysql, 11 | * then aggregates the counts by different dimensions.
12 | * @author xgliao 13 | * 14 | */ 15 | @Service 16 | public class YarnJobService { 17 | 18 | @Autowired 19 | private YarnJobsMapper yarnJobsMapper; 20 | 21 | @Transactional 22 | public void insertJobs(List jobList) { 23 | for (YarnJobsDo job : jobList) { 24 | yarnJobsMapper.insertYarnJob(job); 25 | } 26 | } 27 | 28 | public long getMaxStartTime() { 29 | Long maxTime = yarnJobsMapper.getMaxStartTime(); 30 | if(maxTime == null) { 31 | return 0; 32 | } 33 | return maxTime; 34 | } 35 | 36 | public List getAllCount() { 37 | return yarnJobsMapper.getAllCount(); 38 | } 39 | 40 | public List getCountByDateUser() { 41 | return yarnJobsMapper.getCountByDateUser(); 42 | } 43 | 44 | public List getCountByDateUserD(int date) { 45 | return yarnJobsMapper.getCountByDateUserD(date); 46 | } 47 | 48 | public List getCountByDateUserForPage(int start, int num) { 49 | return yarnJobsMapper.getCountByDateUserForPage(start, num); 50 | } 51 | 52 | public List getYarnJobUserByDate(String date) { 53 | return yarnJobsMapper.getYarnJobUserByDate(date); 54 | } 55 | 56 | public List getYarnJobUserByUserName(String userName) { 57 | return yarnJobsMapper.getYarnJobUserByUserName(userName); 58 | } 59 | 60 | public List getNewestUserJobCount() { 61 | return yarnJobsMapper.getNewestUserJobCount(); 62 | } 63 | 64 | public List getUserJobCountByDate(int date) { 65 | return yarnJobsMapper.getUserJobCountByDate(date); 66 | } 67 | 68 | public List getJobCountByDate(int date) { 69 | return yarnJobsMapper.getJobCountByDate(date); 70 | } 71 | 72 | public List getJobCountByPageRange(int start, int end) { 73 | return yarnJobsMapper.getJobCountByPageRange(start, end); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /sparksql/src/sparksql/templates/jobs.mako: -------------------------------------------------------------------------------- 1 | <%! 2 | from desktop.views import commonheader, commonfooter 3 | from sparksql.conf import DATA_SERVICE 4 | %> 5 | 6 | <%namespace name="shared" file="shared_components.mako" /> 7 | 8 | ${commonheader("Sparksql", "sparksql", user, "100px")|n,unicode} 9 | 10 | ## Use double hashes for a mako template comment 11 | ## Main body 12 | 13 | 14 | 22 | 23 |
24 | ## [jobs.mako lines 24-49 — HTML markup lost in extraction. The body renders the "用户历史查询记录" (user query history) panel: a page-size selector ("每页显示条记录", records shown per page), the query-history table, and pagination links "下一页 / 上一页 / 首页" (next / previous / first page).] ... 49 |
50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | ${commonfooter(messages)|n,unicode} 58 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/pojo/gen/HostList.java: -------------------------------------------------------------------------------- 1 | // 2 | // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 3 | // See http://java.sun.com/xml/jaxb 4 | // Any modifications to this file will be lost upon recompilation of the source schema. 5 | // Generated on: 2014.08.19 at 06:42:37 ���� CST 6 | // 7 | 8 | 9 | package com.ctrip.di.pojo.gen; 10 | 11 | import java.util.ArrayList; 12 | import java.util.List; 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlElement; 16 | import javax.xml.bind.annotation.XmlRootElement; 17 | import javax.xml.bind.annotation.XmlType; 18 | 19 | 20 | /** 21 | *

Java class for anonymous complex type. 22 | * 23 | *

The following schema fragment specifies the expected content contained within this class. 24 | * 25 | *

26 |  * <complexType>
27 |  *   <complexContent>
28 |  *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
29 |  *       <sequence>
30 |  *         <element ref="{}host" maxOccurs="unbounded"/>
31 |  *       </sequence>
32 |  *     </restriction>
33 |  *   </complexContent>
34 |  * </complexType>
35 |  * 
36 | * 37 | * 38 | */ 39 | @XmlAccessorType(XmlAccessType.FIELD) 40 | @XmlType(name = "", propOrder = { 41 | "host" 42 | }) 43 | @XmlRootElement(name = "host_list") 44 | public class HostList { 45 | 46 | @XmlElement(required = true) 47 | protected List host; 48 | 49 | /** 50 | * Gets the value of the host property. 51 | * 52 | *

53 | * This accessor method returns a reference to the live list, 54 | * not a snapshot. Therefore any modification you make to the 55 | * returned list will be present inside the JAXB object. 56 | * This is why there is not a set method for the host property. 57 | * 58 | *

59 | * For example, to add a new item, do as follows: 60 | *

61 |      *    getHost().add(newItem);
62 |      * 
63 | * 64 | * 65 | *

66 | * Objects of the following type(s) are allowed in the list 67 | * {@link String } 68 | * 69 | * 70 | */ 71 | public List getHost() { 72 | if (host == null) { 73 | host = new ArrayList(); 74 | } 75 | return this.host; 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/authenticate/MessageDigestAuthenticate.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.common.authenticate; 2 | 3 | import java.security.MessageDigest; 4 | import java.security.NoSuchAlgorithmException; 5 | 6 | import org.apache.commons.logging.Log; 7 | import org.apache.commons.logging.LogFactory; 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.stereotype.Service; 10 | 11 | import com.ctrip.di.dao.user.AuthUser; 12 | import com.ctrip.di.dao.user.AuthUserMapper; 13 | 14 | /** 15 | * Message Digest authenticate. 16 | * Currently, there is two type of users in di portal. One is Ldap user, 17 | * and the other is hadoop user. 18 | * @author xgliao 19 | * 20 | */ 21 | @Service 22 | public class MessageDigestAuthenticate implements IAuthenticate { 23 | private static Log logger = LogFactory 24 | .getLog(MessageDigestAuthenticate.class); 25 | 26 | private static final char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', 27 | '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; 28 | 29 | @Autowired 30 | private AuthUserMapper authUserMapper; 31 | 32 | @Override 33 | public boolean authenticate(String username, String password) { 34 | AuthUser user = authUserMapper.getAuthUser(username); 35 | String[] parts = user.getPassword().split("\\$"); 36 | if (parts.length != 3) { 37 | logger.info("User password format is not right, while user is " 38 | + username); 39 | return false; 40 | } 41 | 42 | String digestPassword = getDigestPassword(parts[0], parts[1], password); 43 | if(digestPassword != null && digestPassword.equalsIgnoreCase(parts[2])) { 44 | return true; 45 | } 46 | 47 | return false; 48 | } 49 | 50 | public String getDigestPassword(String algorithm, String salt, 51 | String password) { 52 | MessageDigest meesageDigest = null; 53 | try { 54 | meesageDigest = MessageDigest.getInstance(algorithm); 55 | } catch (NoSuchAlgorithmException e) { 56 | logger.warn("Message Digest Exception:", e); 57 | return null; 58 | } 59 | meesageDigest.update((salt + password).getBytes()); 60 | 61 | return getFormattedText(meesageDigest.digest()); 62 | } 63 | 64 | private String getFormattedText(byte[] bytes) { 65 | int len = bytes.length; 66 | 67 | StringBuilder buf = new StringBuilder(len * 2); 68 | 69 | for (int j = 0; j < len; j++) { 70 | buf.append(HEX_DIGITS[(bytes[j] >> 4) & 0x0f]); 71 | buf.append(HEX_DIGITS[bytes[j] & 0x0f]); 72 | } 73 | return buf.toString(); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/pojo/gen/Metrics.java: -------------------------------------------------------------------------------- 1 | // 2 | // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 3 | // See http://java.sun.com/xml/jaxb 4 | // Any modifications to this file will be lost upon recompilation of the source schema. 
5 | // Generated on: 2014.08.19 at 06:42:37 ���� CST 6 | // 7 | 8 | 9 | package com.ctrip.di.pojo.gen; 10 | 11 | import java.util.ArrayList; 12 | import java.util.List; 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlElement; 16 | import javax.xml.bind.annotation.XmlRootElement; 17 | import javax.xml.bind.annotation.XmlType; 18 | 19 | 20 | /** 21 | *

Java class for anonymous complex type. 22 | * 23 | *

The following schema fragment specifies the expected content contained within this class. 24 | * 25 | *

26 |  * <complexType>
27 |  *   <complexContent>
28 |  *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
29 |  *       <sequence>
30 |  *         <element ref="{}metric" maxOccurs="unbounded"/>
31 |  *       </sequence>
32 |  *     </restriction>
33 |  *   </complexContent>
34 |  * </complexType>
35 |  * 
36 | * 37 | * 38 | */ 39 | @XmlAccessorType(XmlAccessType.FIELD) 40 | @XmlType(name = "", propOrder = { 41 | "metric" 42 | }) 43 | @XmlRootElement(name = "metrics") 44 | public class Metrics { 45 | 46 | @XmlElement(required = true) 47 | protected List metric; 48 | 49 | /** 50 | * Gets the value of the metric property. 51 | * 52 | *

53 | * This accessor method returns a reference to the live list, 54 | * not a snapshot. Therefore any modification you make to the 55 | * returned list will be present inside the JAXB object. 56 | * This is why there is not a set method for the metric property. 57 | * 58 | *

59 | * For example, to add a new item, do as follows: 60 | *

61 |      *    getMetric().add(newItem);
62 |      * 
63 | * 64 | * 65 | *

66 | * Objects of the following type(s) are allowed in the list 67 | * {@link Metric } 68 | * 69 | * 70 | */ 71 | public List getMetric() { 72 | if (metric == null) { 73 | metric = new ArrayList(); 74 | } 75 | return this.metric; 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/pojo/gen/Clusters.java: -------------------------------------------------------------------------------- 1 | // 2 | // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 3 | // See http://java.sun.com/xml/jaxb 4 | // Any modifications to this file will be lost upon recompilation of the source schema. 5 | // Generated on: 2014.08.19 at 06:42:37 ���� CST 6 | // 7 | 8 | 9 | package com.ctrip.di.pojo.gen; 10 | 11 | import java.util.ArrayList; 12 | import java.util.List; 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlElement; 16 | import javax.xml.bind.annotation.XmlRootElement; 17 | import javax.xml.bind.annotation.XmlType; 18 | 19 | 20 | /** 21 | *

Java class for anonymous complex type. 22 | * 23 | *

The following schema fragment specifies the expected content contained within this class. 24 | * 25 | *

26 |  * <complexType>
27 |  *   <complexContent>
28 |  *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
29 |  *       <sequence>
30 |  *         <element ref="{}cluster" maxOccurs="unbounded"/>
31 |  *       </sequence>
32 |  *     </restriction>
33 |  *   </complexContent>
34 |  * </complexType>
35 |  * 
36 | * 37 | * 38 | */ 39 | @XmlAccessorType(XmlAccessType.FIELD) 40 | @XmlType(name = "", propOrder = { 41 | "cluster" 42 | }) 43 | @XmlRootElement(name = "clusters") 44 | public class Clusters { 45 | 46 | @XmlElement(required = true) 47 | protected List cluster; 48 | 49 | /** 50 | * Gets the value of the cluster property. 51 | * 52 | *

53 | * This accessor method returns a reference to the live list, 54 | * not a snapshot. Therefore any modification you make to the 55 | * returned list will be present inside the JAXB object. 56 | * This is why there is not a set method for the cluster property. 57 | * 58 | *

59 | * For example, to add a new item, do as follows: 60 | *

61 |      *    getCluster().add(newItem);
62 |      * 
63 | * 64 | * 65 | *

66 | * Objects of the following type(s) are allowed in the list 67 | * {@link Cluster } 68 | * 69 | * 70 | */ 71 | public List getCluster() { 72 | if (cluster == null) { 73 | cluster = new ArrayList(); 74 | } 75 | return this.cluster; 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/controller/AuthUserController.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.controller; 2 | 3 | import java.io.IOException; 4 | import java.io.PrintWriter; 5 | 6 | import javax.servlet.http.HttpServletRequest; 7 | import javax.servlet.http.HttpServletResponse; 8 | 9 | import org.apache.commons.logging.Log; 10 | import org.apache.commons.logging.LogFactory; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.stereotype.Controller; 13 | import org.springframework.web.bind.annotation.RequestMapping; 14 | 15 | import com.ctrip.di.common.authenticate.LdapAuthenticate; 16 | import com.ctrip.di.common.authenticate.MessageDigestAuthenticate; 17 | import com.ctrip.di.common.util.PrintWriterUtil; 18 | import com.ctrip.di.dao.user.AuthUser; 19 | import com.ctrip.di.dao.user.AuthUserMapper; 20 | 21 | /** 22 | * API to authenticate the user by username and password Example: 23 | * http://192.168.81 24 | * .177:8089/di-data-service/auth/authenticate?username=test_xg&password=test 25 | * 26 | * @author xgliao 27 | * 28 | */ 29 | @Controller 30 | @RequestMapping("/auth") 31 | public class AuthUserController { 32 | private static Log logger = LogFactory.getLog(AuthUserController.class); 33 | 34 | @Autowired 35 | private AuthUserMapper authUserMapper; 36 | 37 | @Autowired 38 | private LdapAuthenticate ldapAuthenticate; 39 | 40 | @Autowired 41 | private MessageDigestAuthenticate mdAuthenticate; 42 | 43 | @RequestMapping("/authenticate") 44 | public void authenticate(HttpServletRequest request, 45 | HttpServletResponse response) { 46 | String username = request.getParameter("username"); 47 | String password = request.getParameter("password"); 48 | if (username == null || password == null) { 49 | PrintWriterUtil.writeError(request, response, 50 | "username or password can not be null"); 51 | return; 52 | } 53 | AuthUser authUser = authUserMapper.getAuthUser(username); 54 | if (authUser == null) { 55 | PrintWriterUtil.writeError(request, response, 56 | "username is not exist"); 57 | return; 58 | } 59 | 60 | boolean isValid; 61 | if (authUser.getPassword().startsWith("!")) { 62 | isValid = ldapAuthenticate.authenticate(username, password); 63 | } else { 64 | isValid = mdAuthenticate.authenticate(username, password); 65 | } 66 | 67 | response.setContentType("application/json"); 68 | PrintWriter pw = null; 69 | try { 70 | pw = response.getWriter(); 71 | pw.println("{\"isvalid\":" + isValid + "}"); 72 | } catch (IOException e) { 73 | logger.error("Json Write Error:", e); 74 | } finally { 75 | if (pw != null) { 76 | pw.flush(); 77 | pw.close(); 78 | } 79 | } 80 | 81 | } 82 | 83 | } 84 | -------------------------------------------------------------------------------- /di-data-service/src/test/resources/rest-servlet.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 8 | DI Portal 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 20 | 21 | 22 | 23 | 24 | 25 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 
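The AuthUserController above routes passwords beginning with "!" to LDAP and all others to the message-digest check, then writes {"isvalid":...} as JSON. A bare-bones client sketch against the sample URL from the controller's Javadoc (editor's illustration; substitute your own host and credentials):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;

public class AuthClientSketch {
    public static void main(String[] args) throws Exception {
        // Sample endpoint taken from the controller's Javadoc above.
        URL url = new URL("http://192.168.81.177:8089/di-data-service/auth/authenticate"
                + "?username=test_xg&password=test");
        BufferedReader in = new BufferedReader(
                new InputStreamReader(url.openStream(), "UTF-8"));
        String line;
        while ((line = in.readLine()) != null) {
            System.out.println(line); // expected: {"isvalid":true} or {"isvalid":false}
        }
        in.close();
    }
}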
-------------------------------------------------------------------------------- /di-data-service/src/main/webapp/WEB-INF/rest-servlet.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 8 | DI Portal 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 20 | 21 | 22 | 23 | 24 | 25 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/hive/alert/EMail.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hive.alert; 2 | 3 | import java.util.Properties; 4 | 5 | import javax.mail.Authenticator; 6 | import javax.mail.Message; 7 | import javax.mail.MessagingException; 8 | import javax.mail.PasswordAuthentication; 9 | import javax.mail.Session; 10 | import javax.mail.Transport; 11 | import javax.mail.internet.AddressException; 12 | import javax.mail.internet.InternetAddress; 13 | import javax.mail.internet.MimeMessage; 14 | 15 | import org.apache.commons.logging.Log; 16 | import org.apache.commons.logging.LogFactory; 17 | 18 | public class EMail { 19 | private static final Log logger = LogFactory.getLog(EMail.class); 20 | 21 | public final static String HOST = "*"; 22 | public final static String USER = "*"; 23 | public final static String PASSWORD = "*"; 24 | public final static String FROM = "*"; 25 | 26 | /** 27 | * send email 28 | * 29 | * @throws MessagingException 30 | * @throws Exception 31 | */ 32 | public static int sendMail(String subject, String content, String mails, 33 | String cc) throws MessagingException { 34 | 35 | Properties props = new Properties(); 36 | props.put("mail.smtp.host", HOST); 37 | props.put("mail.smtp.starttls.enable", "true"); 38 | // props.put("mail.smtp.port", "25"); 39 | props.put("mail.smtp.auth", "true"); 40 | // props.put("mail.debug", "true"); 41 | Session mailSession = Session.getInstance(props, new MyAuthenticator()); 42 | 43 | Message message = new MimeMessage(mailSession); 44 | message.setFrom(new InternetAddress(FROM)); 45 | message.addRecipients(Message.RecipientType.TO, getMailList(mails)); 46 | message.addRecipients(Message.RecipientType.CC, getMailList(cc)); 47 | 48 | message.setSubject(subject); 49 | message.setContent(content, "text/html;charset=utf-8"); 50 | 51 | Transport transport = mailSession.getTransport("smtp"); 52 | try { 53 | transport.connect(HOST, USER, PASSWORD); 54 | transport.sendMessage(message, message.getAllRecipients()); 55 | } finally { 56 | if (transport != null) 57 | transport.close(); 58 | } 59 | 60 | return 0; 61 | } 62 | 63 | public static InternetAddress[] getMailList(String mails) { 64 | if (mails == null) { 65 | return null; 66 | } 67 | String[] toMails = mails.split(";"); 68 | InternetAddress[] to = new InternetAddress[toMails.length]; 69 | try { 70 | for (int i = 0; i < toMails.length; i++) { 71 | to[i] = new InternetAddress(toMails[i].trim()); 72 | } 73 | } catch (AddressException e) { 74 | logger.error("The email address error! 
Ignore:", e); 75 | } 76 | 77 | return to; 78 | } 79 | 80 | } 81 | 82 | class MyAuthenticator extends Authenticator { 83 | protected PasswordAuthentication getPasswordAuthentication() { 84 | return new PasswordAuthentication(EMail.USER, EMail.PASSWORD); 85 | } 86 | } -------------------------------------------------------------------------------- /di-data-service/src/test/java/com/ctrip/di/TestJmx.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di; 2 | 3 | import javax.management.MBeanServerConnection; 4 | import javax.management.ObjectName; 5 | import javax.management.remote.JMXConnector; 6 | import javax.management.remote.JMXConnectorFactory; 7 | import javax.management.remote.JMXServiceURL; 8 | 9 | import net.sf.json.JSONObject; 10 | 11 | import com.ctrip.di.common.jmx.YarnJmxBean; 12 | 13 | public class TestJmx { 14 | 15 | public static void main(String[] args) throws Exception { 16 | JMXServiceURL url = new JMXServiceURL( 17 | "service:jmx:rmi:///jndi/rmi://192.168.81.176:8026/jmxrmi"); 18 | 19 | JMXConnector jmxc = JMXConnectorFactory.connect(url); 20 | 21 | MBeanServerConnection mbsc = jmxc.getMBeanServerConnection(); 22 | 23 | YarnJmxBean yarnJmxBean = new YarnJmxBean(); 24 | 25 | ObjectName on = new ObjectName( 26 | "Hadoop:service=ResourceManager,name=QueueMetrics,q0=root"); 27 | Object appsCompleted = mbsc.getAttribute(on, "AppsCompleted"); 28 | if (appsCompleted != null) { 29 | yarnJmxBean.setAppsCompleted((Integer) appsCompleted); 30 | } 31 | 32 | Object appsFailed = mbsc.getAttribute(on, "AppsFailed"); 33 | if (appsFailed != null) { 34 | yarnJmxBean.setAppsFailed((Integer) appsFailed); 35 | } 36 | 37 | Object appsKilled = mbsc.getAttribute(on, "AppsKilled"); 38 | if (appsKilled != null) { 39 | yarnJmxBean.setAppsKilled((Integer) appsKilled); 40 | } 41 | 42 | Object appsPending = mbsc.getAttribute(on, "AppsPending"); 43 | if (appsPending != null) { 44 | yarnJmxBean.setAppsPending((Integer) appsPending); 45 | } 46 | 47 | Object appsRunning = mbsc.getAttribute(on, "AppsRunning"); 48 | if (appsRunning != null) { 49 | yarnJmxBean.setAppsRunning((Integer) appsRunning); 50 | } 51 | 52 | Object appsSubmitted = mbsc.getAttribute(on, "AppsSubmitted"); 53 | if (appsSubmitted != null) { 54 | yarnJmxBean.setAppsSumitted((Integer) appsSubmitted); 55 | } 56 | 57 | ObjectName rmon = new ObjectName( 58 | "Hadoop:service=ResourceManager,name=RMNMInfo"); 59 | Object liveNodeManagers = mbsc.getAttribute(rmon, "LiveNodeManagers"); 60 | if (liveNodeManagers != null) { 61 | yarnJmxBean.setLiveNodeManagers((String) liveNodeManagers); 62 | } 63 | 64 | JSONObject json = JSONObject.fromObject(yarnJmxBean); 65 | System.out.println(json); 66 | 67 | /* 68 | * Set mbeans = mbsc.getMBeanInfo(on); Iterator iter = 69 | * mbeans.iterator(); while (iter.hasNext()) { ObjectInstance oi = 70 | * (ObjectInstance) iter.next(); ObjectName objectName = 71 | * oi.getObjectName(); System.out.println("ObjectName: " + objectName); 72 | * MBeanInfo mi = mbsc.getMBeanInfo(objectName); MBeanAttributeInfo[] 73 | * mais = mi.getAttributes(); for (int i = 0; i < mais.length; i++) { 74 | * MBeanAttributeInfo mai = mais[i]; String attributeName = 75 | * mai.getName(); Object value = mbsc.getAttribute(objectName, 76 | * attributeName); System.out.println("Attribute:" + attributeName + "=" 77 | * + value); } 78 | * 79 | * } 80 | */ 81 | } 82 | 83 | } 84 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | Ctrip Data Infrastructure Hue Apps 2 | ------------ 3 | 4 | Overview 5 | ------------ 6 | 7 | We are going to add more apps into Hue. Currently, we have Monitor and Spark SQL. 8 | Monitor shows a Hadoop dashboard and every metric in your cluster. Spark SQL lets a user submit SQL to execute jobs on a Spark cluster. 9 | 10 | # Getting Started # 11 | 12 | ### Prerequisites ### 13 | - Hadoop Cluster Environment 14 | - Ganglia Cluster 15 | - hue-2.5.0-cdh4.6.0 16 | 17 | #### Misc #### 18 | 19 | - JDK Runtime: JDK6 (OpenJDK or Oracle JDK) 20 | - Maven 21 | - Git 22 | - Tomcat (CATALINA_HOME being set) 23 | 24 | ## Installation ## 25 | 26 | 27 | ###Install Data Service:### 28 | 29 | $yum install tomcat 30 | $Create the mysql tables with the script /di-data-service/src/main/resources/script/di.sql 31 | $Configure /di-data-service/src/main/resources/conf/di.properties for your environment 32 | $cd Hue-Ctrip-DI/di-data-service; 33 | $mvn clean install -DskipTests 34 | $copy di-data-service.war to tomcat 35 | 36 | ###Install monitor app:### 37 | 38 | $mv Hue-Ctrip-DI/monitor $HUE_HOME/apps 39 | $cd $HUE_HOME/apps 40 | $sudo ../tools/app_reg/app_reg.py --install monitor 41 | 42 | ###Install spark sql app:### 43 | 44 | $mv Hue-Ctrip-DI/sparksql $HUE_HOME/apps 45 | $cd $HUE_HOME/apps 46 | $sudo ../tools/app_reg/app_reg.py --install sparksql 47 | 48 | ###hue.ini Config section for monitor### 49 | Configs needed in the hue.ini config file. 50 | 51 | [monitor] 52 | [[di-service]] 53 | di_data_service_url="http://localhost:8080/di-data-service/" 54 | 55 | ###hue.ini Config section for sparksql### 56 | Configs needed in the hue.ini config file. 57 | 58 | [sparksql] 59 | [[spark-service]] 60 | spark_sql_url="http://localhost:8089/di-data-service/" 61 | 62 | UI Example 63 | ------------ 64 | 65 | ###MapReduce Dashboard### 66 | 67 | ![Off-CLI Installation](https://github.com/Ctrip-DI/Hue-Ctrip-DI/blob/master/docs/mapreduce_dashboard.png) 68 | 69 | ###Hdfs Dashboard### 70 | 71 | ![Off-CLI Installation](https://github.com/Ctrip-DI/Hue-Ctrip-DI/blob/master/docs/hdfs_dashboard.png) 72 | 73 | ###Metric Dashboard### 74 | 75 | ![Off-CLI Installation](https://github.com/Ctrip-DI/Hue-Ctrip-DI/blob/master/docs/metric_monitor.png) 76 | 77 | ###Spark SQL UI### 78 | 79 | ![Off-CLI Installation](https://github.com/Ctrip-DI/Hue-Ctrip-DI/blob/master/docs/sparksql.png) 80 | 81 | ##Resources## 82 | 83 | Developer Mail: xgliao@ctrip.com 84 | 85 | ##Copyright and License## 86 | 87 | Copyright 2014 ctriposs 88 | 89 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in compliance with the License. You may obtain a copy of the License in the LICENSE file, or at: 90 | 91 | http://www.apache.org/licenses/LICENSE-2.0 92 | 93 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
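The generated JAXB pojos in this dump (HostList, Metrics and Clusters earlier, ObjectFactory below) are consumed by unmarshalling Ganglia's XML into Java objects. A minimal sketch (editor's illustration; the inline payload is made up to match the <host_list> schema in HostList's Javadoc):

import java.io.StringReader;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;

import com.ctrip.di.pojo.gen.HostList;

public class JaxbSketch {
    public static void main(String[] args) throws Exception {
        // HostList carries @XmlRootElement(name = "host_list"), so it can anchor the context.
        JAXBContext ctx = JAXBContext.newInstance(HostList.class);
        Unmarshaller u = ctx.createUnmarshaller();
        HostList hosts = (HostList) u.unmarshal(new StringReader(
                "<host_list><host>nn1.example.com</host><host>dn1.example.com</host></host_list>"));
        System.out.println(hosts.getHost()); // [nn1.example.com, dn1.example.com]
    }
}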
94 | 95 | 96 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/pojo/gen/ObjectFactory.java: -------------------------------------------------------------------------------- 1 | // 2 | // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 3 | // See http://java.sun.com/xml/jaxb 4 | // Any modifications to this file will be lost upon recompilation of the source schema. 5 | // Generated on: 2014.08.19 at 06:42:37 ���� CST 6 | // 7 | 8 | 9 | package com.ctrip.di.pojo.gen; 10 | 11 | import javax.xml.bind.JAXBElement; 12 | import javax.xml.bind.annotation.XmlElementDecl; 13 | import javax.xml.bind.annotation.XmlRegistry; 14 | import javax.xml.namespace.QName; 15 | 16 | 17 | /** 18 | * This object contains factory methods for each 19 | * Java content interface and Java element interface 20 | * generated in the generated package. 21 | *

An ObjectFactory allows you to programatically 22 | * construct new instances of the Java representation 23 | * for XML content. The Java representation of XML 24 | * content can consist of schema derived interfaces 25 | * and classes representing the binding of schema 26 | * type definitions, element declarations and model 27 | * groups. Factory methods for each of these are 28 | * provided in this class. 29 | * 30 | */ 31 | @XmlRegistry 32 | public class ObjectFactory { 33 | 34 | private final static QName _Host_QNAME = new QName("", "host"); 35 | private final static QName _Link_QNAME = new QName("", "link"); 36 | 37 | /** 38 | * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: generated 39 | * 40 | */ 41 | public ObjectFactory() { 42 | } 43 | 44 | /** 45 | * Create an instance of {@link HostList } 46 | * 47 | */ 48 | public HostList createHostList() { 49 | return new HostList(); 50 | } 51 | 52 | /** 53 | * Create an instance of {@link Metrics } 54 | * 55 | */ 56 | public Metrics createMetrics() { 57 | return new Metrics(); 58 | } 59 | 60 | /** 61 | * Create an instance of {@link Metric } 62 | * 63 | */ 64 | public Metric createMetric() { 65 | return new Metric(); 66 | } 67 | 68 | /** 69 | * Create an instance of {@link Cluster } 70 | * 71 | */ 72 | public Cluster createCluster() { 73 | return new Cluster(); 74 | } 75 | 76 | /** 77 | * Create an instance of {@link Clusters } 78 | * 79 | */ 80 | public Clusters createClusters() { 81 | return new Clusters(); 82 | } 83 | 84 | /** 85 | * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}} 86 | * 87 | */ 88 | @XmlElementDecl(namespace = "", name = "host") 89 | public JAXBElement createHost(String value) { 90 | return new JAXBElement(_Host_QNAME, String.class, null, value); 91 | } 92 | 93 | /** 94 | * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}} 95 | * 96 | */ 97 | @XmlElementDecl(namespace = "", name = "link") 98 | public JAXBElement createLink(String value) { 99 | return new JAXBElement(_Link_QNAME, String.class, null, value); 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/jmx/HdfsJmxService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.common.jmx; 2 | 3 | import javax.management.ObjectName; 4 | import javax.management.openmbean.CompositeDataSupport; 5 | 6 | import net.sf.json.JSONObject; 7 | 8 | import org.springframework.beans.factory.annotation.Value; 9 | import org.springframework.stereotype.Service; 10 | 11 | @Service 12 | public class HdfsJmxService extends AbstractJmxService { 13 | 14 | @Value("${HDFS_JMX_SERVER}") 15 | private String hdfsJmxServer; 16 | 17 | @Override 18 | protected String getJmxServer() { 19 | return hdfsJmxServer; 20 | } 21 | 22 | @Override 23 | public JSONObject getNewJmxBean() throws Exception { 24 | HdfsJmxBean hdfsJmxBean = new HdfsJmxBean(); 25 | 26 | ObjectName memoryObjectName = new ObjectName("java.lang:type=Memory"); 27 | CompositeDataSupport heapMemoryUsage = (CompositeDataSupport) mbsc 28 | .getAttribute(memoryObjectName, "HeapMemoryUsage"); 29 | hdfsJmxBean.setCommitedHeapMemory((Long) heapMemoryUsage 30 | .get("committed")); 31 | hdfsJmxBean.setUsedHeapMemory((Long) heapMemoryUsage.get("used")); 32 | hdfsJmxBean.setTotalHeapMemory((Long) heapMemoryUsage.get("max")); 33 | 34 | CompositeDataSupport 
nonHeapMemoryUsage = (CompositeDataSupport) mbsc 35 | .getAttribute(memoryObjectName, "NonHeapMemoryUsage"); 36 | hdfsJmxBean.setCommitedNonHeapMemory((Long) nonHeapMemoryUsage 37 | .get("committed")); 38 | hdfsJmxBean.setUsedNonHeapMemory((Long) nonHeapMemoryUsage.get("used")); 39 | hdfsJmxBean.setTotalNonHeapMemory((Long) nonHeapMemoryUsage.get("max")); 40 | 41 | ObjectName on = new ObjectName( 42 | "Hadoop:service=NameNode,name=NameNodeInfo"); 43 | Object totalFiles = mbsc.getAttribute(on, "TotalFiles"); 44 | if (totalFiles != null) { 45 | hdfsJmxBean.setUsedFileDirCount((Long) totalFiles); 46 | } 47 | Object totalBlocks = mbsc.getAttribute(on, "TotalBlocks"); 48 | if (totalBlocks != null) { 49 | hdfsJmxBean.setUsedBlockCount((Long) totalBlocks); 50 | } 51 | 52 | Object usedDfs = mbsc.getAttribute(on, "Used"); 53 | if (usedDfs != null) { 54 | hdfsJmxBean.setUsedDfs((Long) usedDfs); 55 | } 56 | 57 | Object totalDfs = mbsc.getAttribute(on, "Total"); 58 | if (totalDfs != null) { 59 | hdfsJmxBean.setTotalDfsCapacity((Long) totalDfs); 60 | } 61 | 62 | Object nonDfsUsedSpace = mbsc.getAttribute(on, "NonDfsUsedSpace"); 63 | if (nonDfsUsedSpace != null) { 64 | hdfsJmxBean.setUsedNonDfs((Long) nonDfsUsedSpace); 65 | } 66 | 67 | Object blockPoolUsedSpace = mbsc.getAttribute(on, "BlockPoolUsedSpace"); 68 | if (blockPoolUsedSpace != null) { 69 | hdfsJmxBean.setUsedBlockPool((Long) blockPoolUsedSpace); 70 | } 71 | 72 | ObjectName fson = new ObjectName( 73 | "Hadoop:service=NameNode,name=FSNamesystem"); 74 | Object blockCapacity = mbsc.getAttribute(fson, "BlockCapacity"); 75 | if (blockCapacity != null) { 76 | hdfsJmxBean.setTotalBlockCount((Integer) blockCapacity); 77 | } 78 | 79 | Object liveNodes = mbsc.getAttribute(on, "LiveNodes"); 80 | if (liveNodes != null) { 81 | hdfsJmxBean.setLiveNodes((String) liveNodes); 82 | } 83 | Object deadNodes = mbsc.getAttribute(on, "DeadNodes"); 84 | if (deadNodes != null) { 85 | hdfsJmxBean.setDeadNodes((String) deadNodes); 86 | } 87 | Object decomNodes = mbsc.getAttribute(on, "DecomNodes"); 88 | if (decomNodes != null) { 89 | hdfsJmxBean.setDecomNodes((String) decomNodes); 90 | } 91 | 92 | JSONObject json = JSONObject.fromObject(hdfsJmxBean); 93 | 94 | return json; 95 | } 96 | 97 | } 98 | -------------------------------------------------------------------------------- /monitor/src/monitor/static/js/combobox.js: -------------------------------------------------------------------------------- 1 | (function( $ ) { 2 | $.widget( "ui.combobox", { 3 | _create: function() { 4 | var self = this, 5 | select = this.element.hide(), 6 | selected = select.children( ":selected" ), 7 | value = selected.val() ?
selected.text() : ""; 8 | var input = this.input = $( "" ) 9 | .insertAfter( select ) 10 | .val( value ) 11 | .autocomplete({ 12 | delay: 0, 13 | minLength: 0, 14 | source: function( request, response ) { 15 | var matcher = new RegExp( $.ui.autocomplete.escapeRegex(request.term), "i" ); 16 | response( select.children( "option" ).map(function() { 17 | var text = $( this ).text(); 18 | if ( this.value && ( !request.term || matcher.test(text) ) ) 19 | return { 20 | label: text.replace( 21 | new RegExp( 22 | "(?![^&;]+;)(?!<[^<>]*)(" + 23 | $.ui.autocomplete.escapeRegex(request.term) + 24 | ")(?![^<>]*>)(?![^&;]+;)", "gi" 25 | ), "$1" ), 26 | value: text, 27 | option: this 28 | }; 29 | }) ); 30 | }, 31 | select: function( event, ui ) { 32 | ui.item.option.selected = true; 33 | self._trigger( "selected", event, { 34 | item: ui.item.option 35 | }); 36 | }, 37 | change: function( event, ui ) { 38 | if ( !ui.item ) { 39 | var matcher = new RegExp( "^" + $.ui.autocomplete.escapeRegex( $(this).val() ) + "$", "i" ), 40 | valid = false; 41 | select.children( "option" ).each(function() { 42 | if ( $( this ).text().match( matcher ) ) { 43 | this.selected = valid = true; 44 | return false; 45 | } 46 | }); 47 | if ( !valid ) { 48 | // remove invalid value, as it didn't match anything 49 | $( this ).val( "" ); 50 | select.val( "" ); 51 | input.data( "autocomplete" ).term = ""; 52 | return false; 53 | } 54 | } 55 | } 56 | }) 57 | .addClass( "ui-widget ui-widget-content ui-corner-left" ) 58 | .attr('id','autoSearch'); 59 | 60 | /*input.data( "autocomplete" )._renderItem = function( ul, item ) { 61 | return $( "

  • " ) 62 | .data( "ui-autocomplete-item", item ) 63 | .append( "" + item.label + "" ) 64 | .appendTo( ul ); 65 | };*/ 66 | 67 | this.button = $( "" ) 68 | .attr( "tabIndex", -1 ) 69 | .attr( "title", "Show All Items" ) 70 | .insertAfter( input ) 71 | .button({ 72 | icons: { 73 | primary: "ui-icon-triangle-1-s" 74 | }, 75 | text: false 76 | }) 77 | .removeClass( "ui-corner-all" ) 78 | .addClass( "ui-corner-right ui-button-icon" ) 79 | .click(function() { 80 | // close if already visible 81 | if ( input.autocomplete( "widget" ).is( ":visible" ) ) { 82 | input.autocomplete( "close" ); 83 | return; 84 | } 85 | 86 | // work around a bug (likely same cause as #5265) 87 | $( this ).blur(); 88 | 89 | // pass empty string as value to search for, displaying all results 90 | input.autocomplete( "search", "" ); 91 | input.focus(); 92 | }); 93 | }, 94 | 95 | destroy: function() { 96 | this.input.remove(); 97 | this.button.remove(); 98 | this.element.show(); 99 | $.Widget.prototype.destroy.call( this ); 100 | } 101 | }); 102 | })( jQuery ); 103 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/hdfs/HdfsFileSummaryService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.hdfs; 2 | 3 | import java.io.IOException; 4 | import java.net.URL; 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | 8 | import javax.inject.Singleton; 9 | 10 | import org.apache.commons.logging.Log; 11 | import org.apache.commons.logging.LogFactory; 12 | import org.apache.hadoop.conf.Configuration; 13 | import org.apache.hadoop.fs.ContentSummary; 14 | import org.apache.hadoop.fs.FileStatus; 15 | import org.apache.hadoop.fs.FileSystem; 16 | import org.apache.hadoop.fs.Path; 17 | import org.springframework.beans.factory.annotation.Value; 18 | import org.springframework.scheduling.annotation.Scheduled; 19 | import org.springframework.stereotype.Service; 20 | 21 | /** 22 | * Hdfs file summary data Service, which provide information for hdfs directory summary 23 | * and refresh the summary automatically 24 | * @author xgliao 25 | * 26 | */ 27 | @Service 28 | @Singleton 29 | public class HdfsFileSummaryService { 30 | private static Log logger = LogFactory.getLog(HdfsFileSummaryService.class); 31 | 32 | private volatile List summaryList = new ArrayList(); 33 | 34 | @Value("${HDFS_CONFIG}") 35 | private String hdfsConfig; 36 | @Value("${HADOOP_USER_NAME}") 37 | private String hadoopUserName; 38 | 39 | public List getContentSummaryList() { 40 | if (summaryList.isEmpty()) { 41 | try { 42 | start(); 43 | } catch (IOException e) { 44 | logger.error("IO Exception", e); 45 | throw new RuntimeException("IO Exception", e); 46 | } 47 | 48 | } 49 | 50 | return summaryList; 51 | } 52 | 53 | @Scheduled(fixedDelay = 4 * 3600 * 1000) 54 | public void start() throws IOException { 55 | logger.info("Start Initialize Hdfs File Summary:" 56 | + System.currentTimeMillis()); 57 | synchronized (summaryList) { 58 | URL configUrl = HdfsFileSummaryService.class.getClassLoader() 59 | .getResource(hdfsConfig); 60 | Configuration conf = new Configuration(); 61 | conf.addResource(configUrl); 62 | 63 | System.setProperty("HADOOP_USER_NAME", hadoopUserName); 64 | FileSystem fs = FileSystem.get(conf); 65 | 66 | System.setProperty("user.name","hdfs"); 67 | List summaryListTemp = new ArrayList(); 68 | for (FileStatus fileStatus : fs.listStatus(new Path("/user"))) { 69 | if (fileStatus.isDirectory()) { 70 | Path filePath = 
fileStatus.getPath(); 71 | ContentSummary summary = fs.getContentSummary(filePath); 72 | HdfsDirSummary hdfsDirSummay = new HdfsDirSummary(); 73 | hdfsDirSummay.setUser(filePath.getName()); 74 | hdfsDirSummay 75 | .setDirectoryCount(summary.getDirectoryCount()); 76 | hdfsDirSummay.setFileCount(summary.getFileCount()); 77 | hdfsDirSummay.setLength(summary.getLength()); 78 | hdfsDirSummay.setQuota(summary.getQuota()); 79 | hdfsDirSummay.setSpaceConsumed(summary.getSpaceConsumed()); 80 | hdfsDirSummay.setSpaceQuota(summary.getSpaceQuota()); 81 | 82 | summaryListTemp.add(hdfsDirSummay); 83 | 84 | } 85 | } 86 | 87 | if (summaryListTemp.size() > 0) { 88 | summaryList = summaryListTemp; 89 | summaryListTemp = null; 90 | } 91 | } 92 | 93 | logger.info("End Initialize Hdfs File Summary:" 94 | + System.currentTimeMillis()); 95 | } 96 | 97 | public static void main(String[] args) throws IOException { 98 | HdfsFileSummaryService test = new HdfsFileSummaryService(); 99 | test.start(); 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/controller/SparkController.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.controller; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import javax.servlet.http.HttpServletRequest; 7 | import javax.servlet.http.HttpServletResponse; 8 | 9 | import net.sf.json.JSONArray; 10 | 11 | import org.apache.commons.lang.StringUtils; 12 | import org.apache.commons.logging.Log; 13 | import org.apache.commons.logging.LogFactory; 14 | import org.springframework.beans.factory.annotation.Autowired; 15 | import org.springframework.stereotype.Controller; 16 | import org.springframework.web.bind.annotation.RequestMapping; 17 | 18 | import com.ctrip.di.common.util.PrintWriterUtil; 19 | import com.ctrip.di.dao.spark.SparkJob; 20 | import com.ctrip.di.dao.spark.SparkJobMapper; 21 | import com.ctrip.di.spark.SparkService; 22 | 23 | /** 24 | * APIs to excute sql on spark 25 | * @author xgliao 26 | * 27 | */ 28 | @Controller 29 | @RequestMapping("/spark") 30 | public class SparkController { 31 | private static Log logger = LogFactory.getLog(SparkController.class); 32 | 33 | private static final int SQL_MAX_LENGTH = 2000; 34 | 35 | @Autowired 36 | private SparkService sparkService; 37 | 38 | @Autowired 39 | private SparkJobMapper sparkJobMapper; 40 | 41 | @RequestMapping("/execute") 42 | public void executeSQL(HttpServletRequest request, 43 | HttpServletResponse response) { 44 | String sql = request.getParameter("sql"); 45 | if (StringUtils.isEmpty(sql)) { 46 | PrintWriterUtil 47 | .writeError(request, response, "SQL can not be null"); 48 | return; 49 | } 50 | String user = request.getParameter("user"); 51 | String database = request.getParameter("database"); 52 | if (database == null) { 53 | database = "default"; 54 | } 55 | sql = sql.replace(";", ""); 56 | List> result = null; 57 | SparkJob sparkJob = new SparkJob(); 58 | sparkJob.setUser(user); 59 | if (sql.length() > SQL_MAX_LENGTH) { 60 | sql = sql.substring(0, SQL_MAX_LENGTH); 61 | } 62 | sparkJob.setSql(sql); 63 | sparkJob.setStartTime(System.currentTimeMillis()); 64 | try { 65 | result = sparkService.executeSQL(sql, database); 66 | } catch (Exception e) { 67 | logger.error("Exception:{}", e); 68 | PrintWriterUtil.writeError(request, response, e.getMessage()); 69 | return; 70 | } 71 | sparkJob.setFinishTime(System.currentTimeMillis()); 72 | boolean isSuccess = 
getStatus(result); 73 | if (isSuccess) { 74 | sparkJob.setStatus("Success"); 75 | } else { 76 | sparkJob.setStatus("Fail"); 77 | } 78 | if (user != null) { 79 | sparkJobMapper.insertSparkJob(sparkJob); 80 | } 81 | 82 | JSONArray jsonArray = JSONArray.fromObject(result); 83 | PrintWriterUtil.writeJson(request, response, jsonArray.toString()); 84 | } 85 | 86 | @RequestMapping("/getjobs") 87 | public void getJobs(HttpServletRequest request, HttpServletResponse response) { 88 | String user = request.getParameter("user"); 89 | if (StringUtils.isEmpty(user)) { 90 | PrintWriterUtil.writeError(request, response, 91 | "User can not be null"); 92 | return; 93 | } 94 | List sparkJobs = sparkJobMapper.getSparkJobsByUser(user); 95 | JSONArray jsonArray = JSONArray.fromObject(sparkJobs); 96 | PrintWriterUtil.writeJson(request, response, jsonArray.toString()); 97 | } 98 | 99 | private boolean getStatus(List> result) { 100 | if (result != null && !result.isEmpty()) { 101 | Object status = result.get(0).get("status"); 102 | if (status != null) { 103 | return false; 104 | } 105 | } 106 | return true; 107 | } 108 | 109 | } 110 | -------------------------------------------------------------------------------- /sparksql/src/sparksql/templates/index.mako: -------------------------------------------------------------------------------- 1 | <%! 2 | from desktop.views import commonheader, commonfooter 3 | from sparksql.conf import SPARK_SERVICE 4 | %> 5 | 6 | <%namespace name="shared" file="shared_components.mako" /> 7 | 8 | ${commonheader("Sparksql", "sparksql", user, "100px")|n,unicode} 9 | 10 | ## Use double hashes for a mako template comment 11 | ## Main body 12 | 13 | 14 | 24 | 25 |
## [index.mako body through line 83 — HTML markup lost in extraction. It renders the Spark SQL form — a "数据库:" (database) selector, the SQL input box, and a "数据加载中..." (data loading...) indicator — followed by the "执行结果" (execution results) panel: a page-size selector ("每页显示条记录", records shown per page), a results table with columns 启动时间 / 结束时间 / 执行时间(s) / 执行SQL / 执行用户 / Status (start time / finish time / duration in seconds / executed SQL / executing user / status), and pagination links "下一页 / 上一页 / 首页" (next / previous / first page).] ... 83 |
    84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | ${commonfooter(messages)|n,unicode} 92 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/controller/GangliaController.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.controller; 2 | 3 | import java.io.IOException; 4 | import java.io.PrintWriter; 5 | 6 | import javax.servlet.http.HttpServletRequest; 7 | import javax.servlet.http.HttpServletResponse; 8 | 9 | import org.apache.commons.lang.StringUtils; 10 | import org.apache.commons.logging.Log; 11 | import org.apache.commons.logging.LogFactory; 12 | import org.springframework.beans.factory.annotation.Autowired; 13 | import org.springframework.beans.factory.annotation.Value; 14 | import org.springframework.stereotype.Controller; 15 | import org.springframework.web.bind.annotation.RequestMapping; 16 | 17 | import com.ctrip.di.common.util.UrlUtils; 18 | import com.ctrip.di.ganglia.GangliaMetricService; 19 | 20 | /** 21 | * APIs to get information from ganglia 22 | * 23 | * @author xgliao 24 | * 25 | */ 26 | @Controller 27 | @RequestMapping("/ganglia") 28 | public class GangliaController { 29 | private static Log logger = LogFactory.getLog(GangliaController.class); 30 | 31 | @Value("${GANGLIA_GRAPH_URL}") 32 | private String graphUrl; 33 | @Value("${GANGLIA_HOST_URL}") 34 | private String hostUrl; 35 | 36 | @Autowired 37 | private GangliaMetricService metricService; 38 | 39 | @RequestMapping("/graphjson") 40 | public void getData(HttpServletRequest request, HttpServletResponse response) { 41 | String queryString = request.getQueryString(); 42 | 43 | response.setContentType("application/json"); 44 | String jsonpCallback = request.getParameter("jsonpCallback"); 45 | String url = graphUrl + queryString; 46 | PrintWriter pw = null; 47 | try { 48 | pw = response.getWriter(); 49 | if (StringUtils.isNotBlank(queryString) 50 | && queryString.indexOf("json=") != -1) { 51 | String jsonData = UrlUtils.getContent(url); 52 | pw.println(jsonpCallback + "(" + jsonData + ")"); 53 | } else { 54 | pw.println(jsonpCallback 55 | + "(" 56 | + "{\"status\":\"failed\",\"messange\":\"json parameter can not be null\"}" 57 | + ")"); 58 | } 59 | } catch (Exception e) { 60 | logger.error("Ganglia Json Error:" + url, e); 61 | if (pw != null) { 62 | pw.println(jsonpCallback + "(" + "{\"status\":\"failed\"}" 63 | + ")"); 64 | } 65 | } finally { 66 | if (pw != null) { 67 | pw.flush(); 68 | pw.close(); 69 | } 70 | } 71 | } 72 | 73 | /* 74 | * @RequestMapping("/graphjsonbycluster") public void 75 | * getDataByCluster(HttpServletRequest request, HttpServletResponse 76 | * response) { 77 | * 78 | * response.setContentType("application/json"); 79 | * 80 | * String clusterName = request.getParameter("clustername"); if (clusterName 81 | * == null) { PrintWriterUtil.writeError(request, response, 82 | * "Cluster Name Can Not be null"); return; } String queryString = 83 | * request.getQueryString(); List hostList = 84 | * metricService.getHostListByCluster(clusterName); for (String host : 85 | * hostList) { String url = graphUrl + queryString + "&&h=" + host; String 86 | * jsonData = UrlUtils.getContent(url); } 87 | * 88 | * String jsonpCallback = request.getParameter("jsonpCallback"); // TODO 89 | * UNFINISHED } 90 | */ 91 | 92 | @RequestMapping("/clusterinfo") 93 | public void getHostInfo(HttpServletRequest request, 94 | HttpServletResponse response) { 95 | String json = UrlUtils.getContent(hostUrl); 96 
| response.setContentType("application/json"); 97 | String jsonpCallback = request.getParameter("jsonpCallback"); 98 | PrintWriter pw = null; 99 | try { 100 | pw = response.getWriter(); 101 | pw.println(jsonpCallback + "(" + json + ")"); 102 | } catch (IOException e) { 103 | logger.error("Ganglia Json Error:" + hostUrl, e); 104 | } finally { 105 | if (pw != null) { 106 | pw.flush(); 107 | pw.close(); 108 | } 109 | } 110 | 111 | } 112 | 113 | } 114 | -------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/spark/SparkService.java: -------------------------------------------------------------------------------- 1 | package com.ctrip.di.spark; 2 | 3 | import java.sql.Connection; 4 | import java.sql.DriverManager; 5 | import java.sql.ResultSet; 6 | import java.sql.ResultSetMetaData; 7 | import java.sql.SQLException; 8 | import java.sql.Statement; 9 | import java.util.ArrayList; 10 | import java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | 14 | import org.apache.commons.logging.Log; 15 | import org.apache.commons.logging.LogFactory; 16 | import org.springframework.beans.factory.annotation.Value; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * Spark service to excute spark sql 21 | * 22 | * @author xgliao 23 | * 24 | */ 25 | @Service 26 | public class SparkService { 27 | private static Log logger = LogFactory.getLog(SparkService.class); 28 | 29 | @Value("${SPARK_JDBC_URL}") 30 | private String sparkUrl; 31 | 32 | private ThreadLocal sparkConnection = new ThreadLocal() { 33 | 34 | @Override 35 | protected Connection initialValue() { 36 | return getConnection(); 37 | } 38 | }; 39 | 40 | protected Connection getConnection() { 41 | Connection conn = null; 42 | try { 43 | Class.forName("org.apache.hive.jdbc.HiveDriver"); 44 | conn = DriverManager.getConnection(sparkUrl, "hive", "hive"); 45 | } catch (ClassNotFoundException e) { 46 | logger.error("Class Not Found Exception:{}", e); 47 | } catch (SQLException e) { 48 | logger.error("SQL Exception:{}", e); 49 | } 50 | 51 | return conn; 52 | } 53 | 54 | /** 55 | * excute sql, if failed, retry again 56 | * 57 | * @param sql 58 | * @param database 59 | * @return 60 | */ 61 | public List> executeSQL(String sql, String database) { 62 | List> result = execute(sql, database); 63 | if (result == null) { 64 | return execute(sql, database); 65 | } 66 | 67 | return result; 68 | } 69 | 70 | private List> execute(String sql, String database) { 71 | List> result = new ArrayList>(); 72 | 73 | Connection conn = sparkConnection.get(); 74 | 75 | ResultSet rs = null; 76 | Statement statement = null; 77 | try { 78 | statement = conn.createStatement(); 79 | boolean isSuccess = statement.execute(getUseDatabase(database)); 80 | if (isSuccess) { 81 | rs = statement.executeQuery(sql); 82 | while (rs.next()) { 83 | ResultSetMetaData metaData = rs.getMetaData(); 84 | Map valueMap = new HashMap(); 85 | for (int i = 1; i <= metaData.getColumnCount(); i++) { 86 | String columnName = metaData.getColumnName(i); 87 | Object value = rs.getObject(columnName); 88 | valueMap.put(columnName, value); 89 | } 90 | result.add(valueMap); 91 | } 92 | } else { 93 | Map errorMap = new HashMap(); 94 | errorMap.put("status", "error"); 95 | errorMap.put("message", 96 | "User Database Failed, please check if database is right"); 97 | 98 | result.add(errorMap); 99 | } 100 | } catch (Exception e) { 101 | logger.error("Exception:", e); 102 | if (conn != null) { 103 | try { 104 | 
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/spark/SparkService.java: --------------------------------------------------------------------------------
package com.ctrip.di.spark;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

/**
 * Spark service to execute Spark SQL
 *
 * @author xgliao
 */
@Service
public class SparkService {
    private static Log logger = LogFactory.getLog(SparkService.class);

    @Value("${SPARK_JDBC_URL}")
    private String sparkUrl;

    private ThreadLocal<Connection> sparkConnection = new ThreadLocal<Connection>() {

        @Override
        protected Connection initialValue() {
            return getConnection();
        }
    };

    protected Connection getConnection() {
        Connection conn = null;
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            conn = DriverManager.getConnection(sparkUrl, "hive", "hive");
        } catch (ClassNotFoundException e) {
            logger.error("Class Not Found Exception", e);
        } catch (SQLException e) {
            logger.error("SQL Exception", e);
        }

        return conn;
    }

    /**
     * Execute SQL; if the first attempt fails, retry once.
     *
     * @param sql
     * @param database
     * @return
     */
    public List<Map<String, Object>> executeSQL(String sql, String database) {
        List<Map<String, Object>> result = execute(sql, database);
        if (result == null) {
            return execute(sql, database);
        }

        return result;
    }

    private List<Map<String, Object>> execute(String sql, String database) {
        List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();

        Connection conn = sparkConnection.get();

        ResultSet rs = null;
        Statement statement = null;
        try {
            statement = conn.createStatement();
            // Statement.execute() reports whether the first result is a
            // ResultSet, not whether the command succeeded; a failing
            // "use <db>" surfaces as an SQLException instead.
            boolean isSuccess = true;
            try {
                statement.execute(getUseDatabase(database));
            } catch (SQLException useDbError) {
                isSuccess = false;
            }
            if (isSuccess) {
                rs = statement.executeQuery(sql);
                while (rs.next()) {
                    ResultSetMetaData metaData = rs.getMetaData();
                    Map<String, Object> valueMap = new HashMap<String, Object>();
                    for (int i = 1; i <= metaData.getColumnCount(); i++) {
                        String columnName = metaData.getColumnName(i);
                        Object value = rs.getObject(columnName);
                        valueMap.put(columnName, value);
                    }
                    result.add(valueMap);
                }
            } else {
                Map<String, Object> errorMap = new HashMap<String, Object>();
                errorMap.put("status", "error");
                errorMap.put("message",
                        "Use database failed, please check that the database is right");

                result.add(errorMap);
            }
        } catch (Exception e) {
            logger.error("Exception:", e);
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e1) {
                    // Ignore Exception
                }
            }
            // reset connection
            sparkConnection.set(getConnection());
            return null;
        } finally {
            if (statement != null) {
                try {
                    statement.close();
                } catch (SQLException e) {
                    // Ignore Exception
                }
            }
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    // Ignore Exception
                }
            }
        }

        return result;
    }

    private String getUseDatabase(String database) {
        return "use " + database;
    }

}
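A minimal sketch of how a caller might drive this service. The table and database names are made up; real callers obtain the bean from Spring, which also resolves SPARK_JDBC_URL into sparkUrl, so constructing it by hand as done here is for shape only.

import java.util.List;
import java.util.Map;

public class SparkServiceExample {
    public static void main(String[] args) {
        // In di-data-service this bean is injected by Spring.
        SparkService service = new SparkService();
        List<Map<String, Object>> rows = service.executeSQL(
                "select count(*) as cnt from some_table", "default"); // hypothetical table
        if (rows != null) { // executeSQL returns null when both attempts fail
            for (Map<String, Object> row : rows) {
                System.out.println(row.get("cnt"));
            }
        }
    }
}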
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/common/jmx/HdfsJmxBean.java: --------------------------------------------------------------------------------
package com.ctrip.di.common.jmx;

/**
 *
 * HDFS Jmx Bean, get the information from hadoop hdfs cluster
 * @author xgliao
 *
 */
public class HdfsJmxBean {
    private long usedFileDirCount;
    private long usedBlockCount;
    private int totalBlockCount;

    private long usedHeapMemory;
    private long commitedHeapMemory;
    private long totalHeapMemory;

    private long usedNonHeapMemory;
    private long commitedNonHeapMemory;
    private long totalNonHeapMemory;

    private long totalDfsCapacity;
    private long usedDfs;
    private long usedNonDfs;

    private long usedBlockPool;

    private String liveNodes;
    private String deadNodes;
    private String decomNodes;

    public long getUsedFileDirCount() {
        return usedFileDirCount;
    }

    public void setUsedFileDirCount(long usedFileDirCount) {
        this.usedFileDirCount = usedFileDirCount;
    }

    public long getUsedBlockCount() {
        return usedBlockCount;
    }

    public void setUsedBlockCount(long usedBlockCount) {
        this.usedBlockCount = usedBlockCount;
    }

    public int getTotalBlockCount() {
        return totalBlockCount;
    }

    public void setTotalBlockCount(int totalBlockCount) {
        this.totalBlockCount = totalBlockCount;
    }

    public long getUsedHeapMemory() {
        return usedHeapMemory;
    }

    public void setUsedHeapMemory(long usedHeapMemory) {
        this.usedHeapMemory = usedHeapMemory;
    }

    public long getCommitedHeapMemory() {
        return commitedHeapMemory;
    }

    public void setCommitedHeapMemory(long commitedHeapMemory) {
        this.commitedHeapMemory = commitedHeapMemory;
    }

    public long getTotalHeapMemory() {
        return totalHeapMemory;
    }

    public void setTotalHeapMemory(long totalHeapMemory) {
        this.totalHeapMemory = totalHeapMemory;
    }

    public long getUsedNonHeapMemory() {
        return usedNonHeapMemory;
    }

    public void setUsedNonHeapMemory(long usedNonHeapMemory) {
        this.usedNonHeapMemory = usedNonHeapMemory;
    }

    public long getCommitedNonHeapMemory() {
        return commitedNonHeapMemory;
    }

    public void setCommitedNonHeapMemory(long commitedNonHeapMemory) {
        this.commitedNonHeapMemory = commitedNonHeapMemory;
    }

    public long getTotalNonHeapMemory() {
        return totalNonHeapMemory;
    }

    public void setTotalNonHeapMemory(long totalNonHeapMemory) {
        this.totalNonHeapMemory = totalNonHeapMemory;
    }

    public long getTotalDfsCapacity() {
        return totalDfsCapacity;
    }

    public void setTotalDfsCapacity(long totalDfsCapacity) {
        this.totalDfsCapacity = totalDfsCapacity;
    }

    public long getUsedDfs() {
        return usedDfs;
    }

    public void setUsedDfs(long usedDfs) {
        this.usedDfs = usedDfs;
    }

    public long getUsedNonDfs() {
        return usedNonDfs;
    }

    public void setUsedNonDfs(long usedNonDfs) {
        this.usedNonDfs = usedNonDfs;
    }

    public long getUsedBlockPool() {
        return usedBlockPool;
    }

    public void setUsedBlockPool(long usedBlockPool) {
        this.usedBlockPool = usedBlockPool;
    }

    public String getLiveNodes() {
        return liveNodes;
    }

    public void setLiveNodes(String liveNodes) {
        this.liveNodes = liveNodes;
    }

    public String getDeadNodes() {
        return deadNodes;
    }

    public void setDeadNodes(String deadNodes) {
        this.deadNodes = deadNodes;
    }

    public String getDecomNodes() {
        return decomNodes;
    }

    public void setDecomNodes(String decomNodes) {
        this.decomNodes = decomNodes;
    }

}
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/dao/YarnJobCountDo.java: --------------------------------------------------------------------------------
package com.ctrip.di.dao;

import java.text.ParseException;
import java.text.SimpleDateFormat;

import net.sf.json.JSONObject;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Yarn job count data object
 * @author xgliao
 *
 */
public class YarnJobCountDo {
    private static Log logger = LogFactory.getLog(YarnJobCountDo.class);

    private String dateStr;
    private long dateTime;
    private int totalCount;
    private int successCount;
    private int failCount;
    private int errorCount;
    private int killCount;

    private int unsuccessCount;
    private double successRate;
    private double unsuccessRate;
    private double failRate;
    private double killRate;
    private double errorRate;

    public String getDateStr() {
        return dateStr;
    }

    public void setDateStr(String dateStr) {
        this.dateStr = dateStr;
    }

    public long getDateTime() {
        return dateTime;
    }

    public void setDateTime(long dateTime) {
        this.dateTime = dateTime;
    }

    public int getTotalCount() {
        return totalCount;
    }

    public void setTotalCount(int totalCount) {
        this.totalCount = totalCount;
    }

    public int getSuccessCount() {
        return successCount;
    }

    public void setSuccessCount(int successCount) {
        this.successCount = successCount;
    }

    public int getFailCount() {
        return failCount;
    }

    public void setFailCount(int failCount) {
        this.failCount = failCount;
    }

    public int getErrorCount() {
        return errorCount;
    }

    public void setErrorCount(int errorCount) {
        this.errorCount = errorCount;
    }

    public int getKillCount() {
        return killCount;
    }

    public void setKillCount(int killCount) {
        this.killCount = killCount;
    }

    public double getSuccessRate() {
        return successRate;
    }

    public void setSuccessRate(double successRate) {
        this.successRate = successRate;
    }

    public double getUnsuccessRate() {
        return unsuccessRate;
    }

    public void setUnsuccessRate(double unsuccessRate) {
        this.unsuccessRate = unsuccessRate;
    }

    public double getFailRate() {
        return failRate;
    }

    public void setFailRate(double failRate) {
        this.failRate = failRate;
    }

    public double getKillRate() {
        return killRate;
    }

    public void setKillRate(double killRate) {
        this.killRate = killRate;
    }

    public int getUnsuccessCount() {
        return unsuccessCount;
    }

    public void setUnsuccessCount(int unsuccessCount) {
        this.unsuccessCount = unsuccessCount;
    }

    public double getErrorRate() {
        return errorRate;
    }

    public void setErrorRate(double errorRate) {
        this.errorRate = errorRate;
    }

    public void initRate() {
        try {
            dateTime = new SimpleDateFormat("yyyy-MM-dd").parse(dateStr)
                    .getTime();
        } catch (ParseException e) {
            // Ignore
            logger.error("Parse Exception", e);
        }
        if (totalCount == 0) {
            return;
        }
        successRate = div(successCount, totalCount);
        errorRate = div(errorCount, totalCount);
        killRate = div(killCount, totalCount);
        // failRate and unsuccessRate are derived as remainders so that the
        // truncated percentages still add up to exactly 100.00.
        failRate = (10000 - (int) (100 * successRate) - (int) (100 * errorRate) - (int) (100 * killRate))
                / (double) 100;
        unsuccessRate = (10000 - (int) (100 * successRate)) / (double) 100;
    }

    // Percentage of num over denom, truncated (not rounded) to two decimals.
    private double div(int num, int denom) {
        double result = (int) ((num / (double) denom) * 10000) / (double) 100;
        return result;
    }

    public static void main(String[] args) {
        YarnJobCountDo countDo = new YarnJobCountDo();
        countDo.setDateStr("2014-08-25");
        countDo.setTotalCount(13823);
        countDo.setSuccessCount(13780);
        countDo.setFailCount(33);

        JSONObject json = JSONObject.fromObject(countDo);
        System.out.println(json.toString());
    }

}
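Since the truncation in div() is easy to misread, here is the arithmetic traced by hand for the sample values in main() above. Note that main() itself does not call initRate(); this is what a call would produce:

// div(13780, 13823) = (int)(0.99689... * 10000) / 100.0 = 9968 / 100.0 = 99.68
// errorRate = killRate = 0.0
// failRate = (10000 - 9968 - 0 - 0) / 100.0 = 0.32
// unsuccessRate = (10000 - 9968) / 100.0 = 0.32
//
// failRate is a derived remainder rather than div(33, 13823) (which would be
// 0.23), so successRate + errorRate + killRate + failRate == 100.00 exactly.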
-------------------------------------------------------------------------------- /monitor/src/monitor/static/js/jquery.cookie.js: --------------------------------------------------------------------------------
/**
 * Cookie plugin
 *
 * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
 * Dual licensed under the MIT and GPL licenses:
 * http://www.opensource.org/licenses/mit-license.php
 * http://www.gnu.org/licenses/gpl.html
 *
 */

/**
 * Create a cookie with the given name and value and other optional parameters.
 *
 * @example $.cookie('the_cookie', 'the_value');
 * @desc Set the value of a cookie.
 * @example $.cookie('the_cookie', 'the_value', { expires: 7, path: '/', domain: 'jquery.com', secure: true });
 * @desc Create a cookie with all available options.
 * @example $.cookie('the_cookie', 'the_value');
 * @desc Create a session cookie.
 * @example $.cookie('the_cookie', null);
 * @desc Delete a cookie by passing null as value. Keep in mind that you have to use the same path and domain
 *       used when the cookie was set.
 *
 * @param String name The name of the cookie.
 * @param String value The value of the cookie.
 * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
 * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
 *                             If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
 *                             If set to null or omitted, the cookie will be a session cookie and will not be retained
 *                             when the browser exits.
 * @option String path The value of the path attribute of the cookie (default: path of page that created the cookie).
 * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
 * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
 *                        require a secure protocol (like HTTPS).
 * @type undefined
 *
 * @name $.cookie
 * @cat Plugins/Cookie
 * @author Klaus Hartl/klaus.hartl@stilbuero.de
 */

/**
 * Get the value of a cookie with the given name.
 *
 * @example $.cookie('the_cookie');
 * @desc Get the value of a cookie.
 *
 * @param String name The name of the cookie.
 * @return The value of the cookie.
 * @type String
 *
 * @name $.cookie
 * @cat Plugins/Cookie
 * @author Klaus Hartl/klaus.hartl@stilbuero.de
 */
jQuery.cookie = function(name, value, options) {
    if (typeof value != 'undefined') { // name and value given, set cookie
        options = options || {};
        if (value === null) {
            value = '';
            options.expires = -1;
        }
        var expires = '';
        if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
            var date;
            if (typeof options.expires == 'number') {
                date = new Date();
                date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
            } else {
                date = options.expires;
            }
            expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
        }
        // CAUTION: Needed to parenthesize options.path and options.domain
        // in the following expressions, otherwise they evaluate to undefined
        // in the packed version for some reason...
        var path = options.path ? '; path=' + (options.path) : '';
        var domain = options.domain ? '; domain=' + (options.domain) : '';
        var secure = options.secure ? '; secure' : '';
        document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
    } else { // only name given, get cookie
        var cookieValue = null;
        if (document.cookie && document.cookie != '') {
            var cookies = document.cookie.split(';');
            for (var i = 0; i < cookies.length; i++) {
                var cookie = jQuery.trim(cookies[i]);
                // Does this cookie string begin with the name we want?
                if (cookie.substring(0, name.length + 1) == (name + '=')) {
                    cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
                    break;
                }
            }
        }
        return cookieValue;
    }
};
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/yarn/YarnJobCrawlerTask.java: --------------------------------------------------------------------------------
package com.ctrip.di.yarn;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import com.cloudera.org.apache.http.HttpStatus;
import com.ctrip.di.dao.YarnJobService;
import com.ctrip.di.dao.YarnJobsDo;

/**
 * A task that crawls jobs from the YARN cluster and inserts them into the database
 * @author xgliao
 *
 */
@Component
public class YarnJobCrawlerTask {
    private static Log logger = LogFactory.getLog(YarnJobCrawlerTask.class);

    @Autowired
    private YarnJobService yarnJobService;

    @Value("${YARN_HISTORY_JOB_URL}")
    private String yarnHistoryJobUrl;

    private JSONObject getJson() throws HttpException, IOException {
        HttpClient httpClient = new HttpClient();
        GetMethod method = new GetMethod(yarnHistoryJobUrl);
        HttpMethodParams methodParams = new HttpMethodParams();
        methodParams.setParameter("Content-Type", "application/json");
        method.setParams(methodParams);
        httpClient.executeMethod(method);
        StringBuilder sb = new StringBuilder();
        if (method.getStatusCode() == HttpStatus.SC_OK) {
            BufferedReader reader = null;
            try {
                reader = new BufferedReader(new InputStreamReader(
                        method.getResponseBodyAsStream(), "UTF-8"));
                String line;
                while ((line = reader.readLine()) != null) {
                    sb.append(line);
                }
            } catch (Exception e) {
                logger.error("Exception:", e);
                throw new RuntimeException("Get json failed", e);
            } finally {
                if (reader != null)
                    reader.close();
            }
        }

        JSONObject jsonObject = JSONObject.fromObject(sb.toString());

        return jsonObject;
    }

    @Scheduled(fixedDelay = 7200 * 1000)
    public void run() {
        try {
            long maxStartTime = yarnJobService.getMaxStartTime();
            List<YarnJobsDo> jobList = getJobList(maxStartTime);
            logger.info("Get yarn job size is " + jobList.size());
            yarnJobService.insertJobs(jobList);
        } catch (HttpException e) {
            logger.error("Http Exception", e);
        } catch (Throwable e) {
            logger.error("Unexpected exception", e);
        }
    }

    private List<YarnJobsDo> getJobList(long maxStartTime)
            throws HttpException, IOException {
        List<YarnJobsDo> jobsList = new ArrayList<YarnJobsDo>();

        JSONObject json = getJson();
        JSONObject jobs = (JSONObject) json.get("jobs");
        JSONArray jobArray = (JSONArray) jobs.get("job");
        @SuppressWarnings("unchecked")
        ListIterator<JSONObject> iterator = jobArray.listIterator();
        while (iterator.hasNext()) {
            JSONObject jobObject = iterator.next();
            long startTime = jobObject.getLong("startTime");
            if (startTime > maxStartTime) {
                YarnJobsDo yarnJobsDo = toYarnJobsDo(jobObject);
                jobsList.add(yarnJobsDo);
            }
        }
        return jobsList;
    }

    private DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

    private YarnJobsDo toYarnJobsDo(JSONObject jobObject) {
        YarnJobsDo jobDo = new YarnJobsDo();
        jobDo.setStartTime(jobObject.getLong("startTime"));
        jobDo.setDateStr(dateFormat.format(new Date(jobObject
                .getLong("startTime"))));
        jobDo.setFinishTime(jobObject.getLong("finishTime"));
        jobDo.setJobId(jobObject.getString("id"));
        jobDo.setQueue(jobObject.getString("queue"));
        jobDo.setUser(jobObject.getString("user"));
        jobDo.setStatus(jobObject.getString("state"));
        jobDo.setMapsTotal(jobObject.getInt("mapsTotal"));
        jobDo.setReducesTotal(jobObject.getInt("reducesTotal"));

        return jobDo;
    }

}
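The traversal above assumes the MapReduce history server's jobs payload. A small self-contained sketch of that shape and the net.sf.json calls used to walk it; the field values are illustrative, not real data:

import net.sf.json.JSONObject;

public class YarnJobJsonExample {
    public static void main(String[] args) {
        // Abridged, made-up payload in the shape the history server returns.
        String payload = "{\"jobs\":{\"job\":[{\"id\":\"job_1408_0001\","
                + "\"user\":\"di\",\"queue\":\"default\",\"state\":\"SUCCEEDED\","
                + "\"startTime\":1408939200000,\"finishTime\":1408939260000,"
                + "\"mapsTotal\":4,\"reducesTotal\":1}]}}";
        JSONObject json = JSONObject.fromObject(payload);
        JSONObject firstJob = json.getJSONObject("jobs").getJSONArray("job")
                .getJSONObject(0);
        System.out.println(firstJob.getString("id") + " -> "
                + firstJob.getString("state"));
    }
}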
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/hive/util/HiveHelper.java: --------------------------------------------------------------------------------
package com.ctrip.di.hive.util;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.log4j.Logger;
import org.apache.thrift.TException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class HiveHelper {

    @Value("${HIVE_CONFIG}")
    private String hive_conf;

    private static final Logger logger = Logger.getLogger(HiveHelper.class);

    private static HiveMetaStoreClient hiveClient = null;

    public HiveMetaStoreClient getHiveMetaStoreClient() {
        if (hiveClient == null) {
            HiveConf conf = new HiveConf();
            conf.addResource(HiveHelper.class.getClassLoader().getResource(
                    hive_conf));

            try {
                hiveClient = new HiveMetaStoreClient(conf, null);

            } catch (MetaException e) {
                logger.error("hive metastore exception: " + e);
            }
        }
        return hiveClient;
    }

    public List<Partition> getHiveTablePartitionList(String dbName,
            String tableName) {

        HiveMetaStoreClient hiveClient = getHiveMetaStoreClient();

        List<Partition> partitionList = null;

        try {
            partitionList = hiveClient.listPartitions(dbName, tableName,
                    Short.MAX_VALUE);
        } catch (NoSuchObjectException e) {
            logger.warn("no table named:" + dbName + "." + tableName, e);
        } catch (MetaException e) {
            logger.error("hive metastore exception: " + e);
        } catch (TException e) {
            logger.error(e);
        }

        return partitionList;
    }
named:" + dbName + "." + tableName, e); 58 | } catch (MetaException e) { 59 | logger.error("hive metastore exception: " + e); 60 | } catch (TException e) { 61 | logger.error(e); 62 | } 63 | 64 | return partitionList; 65 | } 66 | 67 | public boolean isHiveTableExist(String tablename) { 68 | 69 | if (!tablename.contains(".")) { 70 | return false; 71 | } 72 | int index = tablename.indexOf("."); 73 | String dbName = tablename.substring(0, index); 74 | String tableName = tablename.substring(index + 1); 75 | 76 | try { 77 | HiveMetaStoreClient hiveClient = getHiveMetaStoreClient(); 78 | return hiveClient.tableExists(dbName, tableName); 79 | } catch (NoSuchObjectException e) { 80 | logger.warn("no table named:" + tablename, e); 81 | } catch (Exception e) { 82 | logger.error(e); 83 | } 84 | return false; 85 | } 86 | 87 | public boolean hasPrivilegeToSetCleanAlert(String database, String table, 88 | String user) { 89 | HiveMetaStoreClient hiveClient = getHiveMetaStoreClient(); 90 | HiveObjectRef hiveObject = new HiveObjectRef(); 91 | hiveObject.setDbName(database); 92 | hiveObject.setObjectName(table); 93 | hiveObject.setObjectType(HiveObjectType.TABLE); 94 | List privileges = new ArrayList(); 95 | try { 96 | privileges = hiveClient.list_privileges(user, PrincipalType.USER, 97 | hiveObject); 98 | } catch (Exception e) { 99 | logger.error("Error to get privileges:", e); 100 | return false; 101 | } 102 | for (HiveObjectPrivilege privilege : privileges) { 103 | String privilegeName = privilege.getGrantInfo().getPrivilege(); 104 | if (privilegeName != null 105 | && ("all".equalsIgnoreCase(privilegeName) 106 | || "create".equalsIgnoreCase(privilegeName) || "ALTER" 107 | .equalsIgnoreCase(privilegeName))) { 108 | return true; 109 | } 110 | } 111 | 112 | return false; 113 | } 114 | 115 | public List getPartitionsByFilter(String dbName, 116 | String tableName, String filter) { 117 | 118 | logger.info("Table:" + dbName + "." + tableName + " filter=" + filter); 119 | HiveMetaStoreClient hiveClient = getHiveMetaStoreClient(); 120 | 121 | List partitionList = null; 122 | 123 | try { 124 | partitionList = hiveClient.listPartitionsByFilter(dbName, 125 | tableName, filter, Short.MAX_VALUE); 126 | } catch (NoSuchObjectException e) { 127 | logger.warn("no table named:" + dbName + "." 
-------------------------------------------------------------------------------- /sparksql/src/sparksql/static/js/loading.js: --------------------------------------------------------------------------------
var Loading = function (element, options) {
    this.options = options;
    this._element = $(element);
    this._uid = 'loading-'+new Date().getTime()+(Math.random()*1e10).toFixed(0);
};

Loading.prototype = {

    constructor: Loading,

    show: function () {
        var that = this, mask = '', icon = '';

        if( that._element.find('.loading').length ) return

        if( that._element.css('position')==='static' ){
            that._element.css('position','relative');
        }

        var s = '.loading-mask{background-color:#000;opacity:0.30;filter:alpha(opacity=30);}.loading{display:inline-block;*display:inline;*zoom:1;top:50%;left:50%;padding:20px 40px 16px;background-color:#fff;text-align:center;color:#666;font-size:12px;border:1px solid #888;border-radius:2px;box-shadow:2px 2px 0 #999;}.loading > img{margin:0 10px 0 0;position:relative;top:-2px;}';

        s = s + that.options.style;

        // Wrap the accumulated CSS in a style element so it can be appended.
        s = '<style>' + s + '</style>';

        if( that.options.mask ){
            // Mask element carrying the per-instance uid class used by the selectors below.
            mask = '<div class="loading-mask ' + that._uid + '"></div>';
        }

        if( typeof that.options.icon==='boolean' && that.options.icon ){
            // No default spinner image is bundled here; pass a URL via
            // options.icon to show one.
            icon = '';
        }

        if( typeof that.options.icon==='string' && that.options.icon ){
            icon = '<img src="' + that.options.icon + '">';
        }

        that._element.append( s + mask + '<div class="loading ' + that._uid + '">' + icon + '<span>' + that.options.text + '</span></div>' );

        var w = $('.'+that._uid+'.loading')[0].offsetWidth;
        var h = $('.'+that._uid+'.loading')[0].offsetHeight;

        $('.'+that._uid+'.loading-mask').css({
            'position': that.options.position,
            'z-index': that.options.zindex-1,
            'top': that.options.maskOffset.top,
            'right': that.options.maskOffset.right,
            'left': that.options.maskOffset.left,
            'bottom': that.options.maskOffset.bottom
        });

        if( that.options.position==='fixed' ){
            h = 0;
            w = 0;
        }else if( that.options.position==='absolute' ){
            h = h/2*(-1);
            w = w/2*(-1);
        }

        $('.'+that._uid+'.loading').css({
            'position': that.options.position,
            'z-index': that.options.zindex,
            'margin-top': h+that.options.offsetTop,
            'margin-left': w+that.options.offsetLeft
        });
    },

    hide: function (e) {
        e && e.preventDefault();
        if( this.options.fadeout ){
            $('.'+this._uid).fadeOut(400, function(){
                $(this).remove();
            });
        }else{
            $('.'+this._uid).remove();
        }
    }
};

var old = $.fn.loading;

$.fn.loading = function (option) {
    return this.each(function () {
        var $this = $(this),
            data = $this.data('loading'),
            options = $.extend({}, $.fn.loading.defaults, $this.data(), typeof option == 'object' && option);

        if (!data) $this.data('loading', (data = new Loading(this, options)));
        if (typeof option == 'string')
            data[option]();
        else if (options.show)
            data.show();
    });
};

$.fn.loading.defaults = {
    show: true,
    mask: true,
    fadeout: false,
    zindex: 1000,
    offsetTop: 0,
    offsetLeft: 0,
    maskOffset: {
        top: 0,
        right: 0,
        left: 0,
        bottom: 0
    },
    position: 'absolute',
    icon: true,
    style: '',
    text: '正在加载中...' // "Loading..."
};

$.fn.loading.Constructor = Loading;

$.fn.loading.noConflict = function () {
    $.fn.loading = old;
    return this;
};


function loading(){
    // 'container' is assumed to be defined by the including page.
    container.html('正在加载中...'); // "Loading..."
}
-------------------------------------------------------------------------------- /di-data-service/src/main/java/com/ctrip/di/pojo/gen/Cluster.java: --------------------------------------------------------------------------------
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See http://java.sun.com/xml/jaxb
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.08.19 at 06:42:37 CST
//


package com.ctrip.di.pojo.gen;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

/**
 * Java class for anonymous complex type.
 *
 * The following schema fragment specifies the expected content contained within this class.
 *
 * <complexType>
 *   <complexContent>
 *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       <sequence>
 *         <element ref="{}host_list"/>
 *         <element ref="{}link"/>
 *         <element ref="{}metrics"/>
 *       </sequence>
 *       <attribute name="label" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
 *       <attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}NCName" />
 *     </restriction>
 *   </complexContent>
 * </complexType>
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "hostList",
    "link",
    "metrics"
})
@XmlRootElement(name = "cluster")
public class Cluster {

    @XmlElement(name = "host_list", required = true)
    protected HostList hostList;
    @XmlElement(required = true)
    @XmlSchemaType(name = "anyURI")
    protected String link;
    @XmlElement(required = true)
    protected Metrics metrics;
    @XmlAttribute(required = true)
    @XmlSchemaType(name = "anySimpleType")
    protected String label;
    @XmlAttribute(required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NCName")
    protected String name;

    /**
     * Gets the value of the hostList property.
     *
     * @return
     *     possible object is
     *     {@link HostList }
     *
     */
    public HostList getHostList() {
        return hostList;
    }

    /**
     * Sets the value of the hostList property.
     *
     * @param value
     *     allowed object is
     *     {@link HostList }
     *
     */
    public void setHostList(HostList value) {
        this.hostList = value;
    }

    /**
     * Gets the value of the link property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLink() {
        return link;
    }

    /**
     * Sets the value of the link property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLink(String value) {
        this.link = value;
    }

    /**
     * Gets the value of the metrics property.
     *
     * @return
     *     possible object is
     *     {@link Metrics }
     *
     */
    public Metrics getMetrics() {
        return metrics;
    }

    /**
     * Sets the value of the metrics property.
     *
     * @param value
     *     allowed object is
     *     {@link Metrics }
     *
     */
    public void setMetrics(Metrics value) {
        this.metrics = value;
    }

    /**
     * Gets the value of the label property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLabel() {
        return label;
    }

    /**
     * Sets the value of the label property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLabel(String value) {
        this.label = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

}
--------------------------------------------------------------------------------
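As a closing illustration, a minimal sketch of how these generated classes might be used to unmarshal a Ganglia cluster description. The XML fragment is a made-up assumption shaped to match the schema above, and the example assumes it runs in the com.ctrip.di.pojo.gen package:

import java.io.StringReader;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;

public class ClusterUnmarshalExample {
    public static void main(String[] args) throws Exception {
        // Illustrative fragment; real input comes from the Ganglia XML feed.
        String xml = "<cluster label=\"DI\" name=\"di-cluster\">"
                + "<host_list/><link>http://ganglia.example.com</link><metrics/>"
                + "</cluster>";
        JAXBContext context = JAXBContext.newInstance(Cluster.class);
        Unmarshaller unmarshaller = context.createUnmarshaller();
        Cluster cluster = (Cluster) unmarshaller.unmarshal(new StringReader(xml));
        System.out.println(cluster.getName() + " / " + cluster.getLabel());
    }
}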