├── DEVNOTES
├── LICENSE
├── Makefile
├── NOTICE
├── README.md
├── bigtop.mk
├── check-env.sh
├── docs
└── logo.jpg
├── package.mk
├── src
├── pkg
│ ├── common
│ │ ├── flume
│ │ │ └── install_flume.sh
│ │ ├── hadoop
│ │ │ ├── conf.pseudo
│ │ │ │ ├── README
│ │ │ │ ├── core-site.xml
│ │ │ │ ├── hadoop-metrics.properties
│ │ │ │ ├── hdfs-site.xml
│ │ │ │ └── mapred-site.xml
│ │ │ ├── conf.secure
│ │ │ │ ├── configuration.xsl
│ │ │ │ ├── core-site.xml
│ │ │ │ ├── hadoop-env.sh
│ │ │ │ ├── hadoop-metrics.properties
│ │ │ │ ├── hadoop-policy.xml
│ │ │ │ ├── hdfs-site.xml
│ │ │ │ ├── log4j.properties
│ │ │ │ ├── mapred-queue-acls.xml
│ │ │ │ ├── mapred-site.xml
│ │ │ │ ├── masters
│ │ │ │ ├── slaves
│ │ │ │ └── taskcontroller.cfg
│ │ │ ├── hadoop-fuse-dfs.1
│ │ │ ├── hadoop.1
│ │ │ ├── hadoop.default
│ │ │ └── install_hadoop.sh
│ │ ├── hbase
│ │ │ ├── hbase.1
│ │ │ └── install_hbase.sh
│ │ ├── hive
│ │ │ ├── hadoop-hive-metastore.default
│ │ │ ├── hadoop-hive-server.default
│ │ │ ├── hadoop-hive.sh
│ │ │ ├── hadoop-hive.sh.suse
│ │ │ ├── hive-site.xml
│ │ │ ├── hive.1
│ │ │ └── install_hive.sh
│ │ ├── oozie
│ │ │ ├── create-package-layout
│ │ │ ├── do-release-build
│ │ │ ├── oozie-env.sh
│ │ │ ├── oozie-examples.sh
│ │ │ ├── oozie.1
│ │ │ └── oozie.init
│ │ ├── pig
│ │ │ ├── install_pig.sh
│ │ │ ├── log4j.properties
│ │ │ ├── pig.1
│ │ │ └── pig.properties
│ │ ├── sqoop
│ │ │ ├── install_sqoop.sh
│ │ │ ├── sqoop-metastore.sh
│ │ │ └── sqoop-metastore.sh.suse
│ │ ├── whirr
│ │ │ ├── install_whirr.sh
│ │ │ └── whirr.1
│ │ └── zookeeper
│ │ │ ├── hadoop-zookeeper.sh
│ │ │ ├── hadoop-zookeeper.sh.suse
│ │ │ ├── install_zookeeper.sh
│ │ │ └── zookeeper.1
│ ├── deb
│ │ ├── flume
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── flume-master.init
│ │ │ ├── flume-node.init
│ │ │ ├── flume.docs
│ │ │ ├── flume.manpages
│ │ │ ├── flume.postinst
│ │ │ ├── flume.preinst
│ │ │ └── rules
│ │ ├── hadoop
│ │ │ ├── append_licenses.sh
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── dirs
│ │ │ ├── docs
│ │ │ ├── hadoop-conf-pseudo.install
│ │ │ ├── hadoop-conf-pseudo.lintian-overrides
│ │ │ ├── hadoop-conf-pseudo.postinst
│ │ │ ├── hadoop-conf-pseudo.prerm
│ │ │ ├── hadoop-doc.dirs
│ │ │ ├── hadoop-doc.install
│ │ │ ├── hadoop-doc.lintian-overrides
│ │ │ ├── hadoop-fuse.dirs
│ │ │ ├── hadoop-fuse.install
│ │ │ ├── hadoop-fuse.lintian-overrides
│ │ │ ├── hadoop-fuse.manpages
│ │ │ ├── hadoop-native.dirs
│ │ │ ├── hadoop-native.install
│ │ │ ├── hadoop-native.lintian-overrides
│ │ │ ├── hadoop-pipes.dirs
│ │ │ ├── hadoop-pipes.install
│ │ │ ├── hadoop-source.install
│ │ │ ├── hadoop.dirs
│ │ │ ├── hadoop.install
│ │ │ ├── hadoop.lintian-overrides
│ │ │ ├── hadoop.manpages
│ │ │ ├── hadoop.postinst
│ │ │ ├── hadoop.preinst
│ │ │ ├── hadoop.prerm
│ │ │ ├── install_init_scripts.sh
│ │ │ ├── libhdfs-devel.dirs
│ │ │ ├── libhdfs-devel.install
│ │ │ ├── libhdfs.dirs
│ │ │ ├── libhdfs.install
│ │ │ ├── rules
│ │ │ ├── service-init.d.tpl
│ │ │ ├── service-postinst.tpl
│ │ │ ├── service-postrm.tpl
│ │ │ ├── shlibs.local
│ │ │ └── source.lintian-overrides
│ │ ├── hbase
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── hadoop-hbase-doc.dirs
│ │ │ ├── hadoop-hbase-doc.install
│ │ │ ├── hadoop-hbase.default
│ │ │ ├── hadoop-hbase.dirs
│ │ │ ├── hadoop-hbase.install
│ │ │ ├── hadoop-hbase.manpages
│ │ │ ├── hadoop-hbase.postinst
│ │ │ ├── hadoop-hbase.preinst
│ │ │ ├── hadoop-hbase.prerm
│ │ │ ├── install_init_scripts.sh
│ │ │ ├── rules
│ │ │ ├── service-init.d.tpl
│ │ │ ├── service-postinst.tpl
│ │ │ └── service-postrm.tpl
│ │ ├── hive
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── hadoop-hive.default
│ │ │ ├── hadoop-hive.postinst
│ │ │ ├── hadoop-hive.preinst
│ │ │ ├── hadoop-hive.prerm
│ │ │ ├── install_init_scripts.sh
│ │ │ ├── rules
│ │ │ ├── service-init.d.tpl
│ │ │ ├── service-postinst.tpl
│ │ │ └── service-postrm.tpl
│ │ ├── oozie
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── oozie.postinst
│ │ │ ├── oozie.postrm
│ │ │ ├── oozie.preinst
│ │ │ └── rules
│ │ ├── pig
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── hadoop-pig.postinst
│ │ │ ├── hadoop-pig.preinst
│ │ │ ├── pig.dirs
│ │ │ └── rules
│ │ ├── sqoop
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── rules
│ │ │ ├── sqoop-metastore.init
│ │ │ ├── sqoop-metastore.postinst
│ │ │ └── sqoop-metastore.preinst
│ │ ├── whirr
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ └── rules
│ │ └── zookeeper
│ │ │ ├── changelog
│ │ │ ├── compat
│ │ │ ├── control
│ │ │ ├── copyright
│ │ │ ├── hadoop-zookeeper-server.init
│ │ │ ├── hadoop-zookeeper.postinst
│ │ │ ├── hadoop-zookeeper.preinst
│ │ │ └── rules
│ └── rpm
│ │ ├── flume
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ ├── init.d
│ │ │ └── init.d.suse
│ │ ├── SPECS
│ │ │ └── flume.spec
│ │ └── SRPMS
│ │ │ └── .gitignore
│ │ ├── hadoop
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ ├── hadoop-init.tmpl
│ │ │ └── hadoop-init.tmpl.suse
│ │ └── SPECS
│ │ │ ├── .gitignore
│ │ │ └── hadoop.spec
│ │ ├── hbase
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ ├── .gitignore
│ │ │ ├── hadoop-hbase.sh
│ │ │ ├── hadoop-hbase.sh.suse
│ │ │ └── hbase.default
│ │ ├── SPECS
│ │ │ ├── .gitignore
│ │ │ └── hbase.spec
│ │ └── SRPMS
│ │ │ └── .gitignore
│ │ ├── hive
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ └── install_hive.sh
│ │ └── SPECS
│ │ │ ├── .gitignore
│ │ │ └── hive.spec
│ │ ├── oozie
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ └── .gitignore
│ │ ├── SPECS
│ │ │ └── oozie.spec
│ │ └── SRPMS
│ │ │ └── .gitignore
│ │ ├── pig
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ └── SPECS
│ │ │ ├── .gitignore
│ │ │ └── pig.spec
│ │ ├── sqoop
│ │ ├── BUILD
│ │ │ └── .gitignore
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ └── .gitignore
│ │ ├── SPECS
│ │ │ └── sqoop.spec
│ │ └── SRPMS
│ │ │ └── .gitignore
│ │ ├── whirr
│ │ ├── BUILD
│ │ │ └── .gitignore
│ │ ├── RPMS
│ │ │ └── .gitignore
│ │ ├── SOURCES
│ │ │ └── .gitignore
│ │ ├── SPECS
│ │ │ └── whirr.spec
│ │ └── SRPMS
│ │ │ └── .gitignore
│ │ └── zookeeper
│ │ ├── .gitignore
│ │ ├── RPMS
│ │ └── .gitignore
│ │ ├── SPECS
│ │ ├── .gitignore
│ │ └── zookeeper.spec
│ │ └── SRPMS
│ │ └── .gitignore
└── repos
│ └── distributions
└── test
├── MANIFEST.txt
├── NOTICE.txt
├── site
├── pom.xml
└── src
│ └── site
│ ├── apt
│ ├── devguide.apt
│ ├── downloads.apt
│ ├── examples.apt
│ ├── index.apt
│ ├── itest.apt
│ └── userguide.apt
│ ├── fml
│ └── faq.fml
│ ├── resources
│ └── images
│ │ ├── banner.png
│ │ └── itest.png
│ └── site.xml
├── src
├── integration
│ └── sqoop
│ │ ├── pom.xml
│ │ └── src
│ │ └── test
│ │ ├── groovy
│ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ └── integration
│ │ │ └── sqoop
│ │ │ ├── IntegrationTestSqoopHBase.groovy
│ │ │ └── IntegrationTestSqoopHive.groovy
│ │ └── resources
│ │ ├── hbase-sqoop
│ │ ├── create-table.hxt
│ │ ├── drop-table.hxt
│ │ ├── expected-hbase-output.txt
│ │ ├── mysql-create-db.sql
│ │ ├── mysql-load-db.sql
│ │ └── select-table.hxt
│ │ └── hive-sqoop
│ │ ├── expected-hive-output.txt
│ │ ├── hive-drop-table.hql
│ │ ├── hive-select-table.hql
│ │ ├── mysql-create-db.sql
│ │ └── mysql-load-db.sql
├── itest-common
│ ├── README
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── groovy
│ │ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ ├── JUnitUtils.groovy
│ │ │ ├── JarContent.groovy
│ │ │ ├── TestListUtils.groovy
│ │ │ ├── junit
│ │ │ └── OrderedParameterized.java
│ │ │ ├── pmanager
│ │ │ ├── AptCmdLinePackageManager.groovy
│ │ │ ├── DEBPackage.groovy
│ │ │ ├── ManagedPackage.groovy
│ │ │ ├── PackageInstance.groovy
│ │ │ ├── PackageManager.groovy
│ │ │ ├── RPMPackage.groovy
│ │ │ ├── YumCmdLinePackageManager.groovy
│ │ │ └── ZypperCmdLinePackageManager.groovy
│ │ │ ├── posix
│ │ │ ├── Alternative.groovy
│ │ │ ├── Service.groovy
│ │ │ └── UGI.groovy
│ │ │ └── shell
│ │ │ ├── OS.groovy
│ │ │ └── Shell.groovy
│ │ └── test
│ │ └── groovy
│ │ └── com
│ │ └── cloudera
│ │ └── itest
│ │ ├── DummyTestError.groovy
│ │ ├── DummyTestFail.groovy
│ │ ├── DummyTestPass.groovy
│ │ ├── JUnitUtilsTest.groovy
│ │ ├── JarContentTest.groovy
│ │ ├── TestListUtilsTest.groovy
│ │ ├── junit
│ │ └── OrderedParameterizedTest.groovy
│ │ ├── pmanager
│ │ └── PackageManagerTest.groovy
│ │ ├── posix
│ │ ├── AlternativeTest.groovy
│ │ ├── ServiceTest.groovy
│ │ └── UGITest.groovy
│ │ └── shell
│ │ └── ShellTest.groovy
└── smokes
│ ├── README
│ ├── flume
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ ├── groovy
│ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ └── flumesmoke
│ │ │ └── TestFlumeSmoke.groovy
│ │ └── resources
│ │ ├── FlumeSmokeBzip2
│ │ └── flume-site.xml
│ │ ├── FlumeSmokeDeflate
│ │ └── flume-site.xml
│ │ ├── FlumeSmokeGzip
│ │ └── flume-site.xml
│ │ └── events.txt
│ ├── hadoop
│ ├── README
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ ├── groovy
│ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ ├── hadoopexamples
│ │ │ └── TestHadoopExamples.groovy
│ │ │ ├── hadoopsmoke
│ │ │ └── TestHadoopSmoke.groovy
│ │ │ └── hadooptests
│ │ │ └── TestTestCLI.groovy
│ │ └── resources
│ │ ├── cachedir.jar
│ │ ├── clitest_data
│ │ ├── data120bytes
│ │ ├── data15bytes
│ │ ├── data30bytes
│ │ └── data60bytes
│ │ ├── examples
│ │ ├── ints
│ │ │ ├── file1.txt
│ │ │ └── file2.txt
│ │ └── text
│ │ │ ├── pg11.txt
│ │ │ └── pg2265.txt
│ │ ├── input.txt
│ │ ├── map.sh
│ │ └── testConfCluster.xml
│ ├── hbase
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ └── groovy
│ │ └── com
│ │ └── cloudera
│ │ └── itest
│ │ └── hbasesmoke
│ │ └── TestHbasePigSmoke.groovy
│ ├── hive
│ ├── README
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ ├── groovy
│ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ └── hivesmoke
│ │ │ ├── HiveBulkScriptExecutor.groovy
│ │ │ ├── IntegrationTestHiveSmokeBulk.groovy
│ │ │ ├── TestHiveSmokeBulk.groovy
│ │ │ └── TestJdbcDriver.java
│ │ └── resources
│ │ ├── a.txt
│ │ ├── scripts
│ │ ├── integration
│ │ │ ├── hbase_joins
│ │ │ │ ├── in
│ │ │ │ └── out
│ │ │ ├── hbase_pushdown
│ │ │ │ ├── in
│ │ │ │ └── out
│ │ │ ├── hbase_queries
│ │ │ │ ├── filter
│ │ │ │ ├── in
│ │ │ │ └── out
│ │ │ └── hbase_stats
│ │ │ │ ├── filter
│ │ │ │ ├── in
│ │ │ │ └── out
│ │ └── ql
│ │ │ ├── authorization_2
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── auto_join20
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── basic
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── bucketizedhiveinputformat
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── bucketmapjoin5
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── drop_multi_partitions
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── groupby_map_ppr_multi_distinct
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── index_creation
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── join19
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── join_filters
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── load_dyn_part14
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── merge_dynamic_partition
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── multi_insert
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── rcfile_columnar
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── stats8
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ ├── union3
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ │ └── uniquejoin
│ │ │ ├── filter
│ │ │ ├── in
│ │ │ └── out
│ │ ├── seed.hql
│ │ ├── seed_data_files
│ │ ├── T1.txt
│ │ ├── T2.txt
│ │ ├── T3.txt
│ │ ├── apache.access.2.log
│ │ ├── apache.access.log
│ │ ├── complex.seq
│ │ ├── covar_tab.txt
│ │ ├── create_nested_type.txt
│ │ ├── datatypes.txt
│ │ ├── docurl.txt
│ │ ├── empty1.txt
│ │ ├── empty2.txt
│ │ ├── hive_626_bar.txt
│ │ ├── hive_626_count.txt
│ │ ├── hive_626_foo.txt
│ │ ├── in1.txt
│ │ ├── in2.txt
│ │ ├── in3.txt
│ │ ├── in4.txt
│ │ ├── in5.txt
│ │ ├── in6.txt
│ │ ├── json.txt
│ │ ├── kv1.seq
│ │ ├── kv1.string-sorted.txt
│ │ ├── kv1.txt
│ │ ├── kv1.val.sorted.txt
│ │ ├── kv1_broken.seq
│ │ ├── kv1_cb.txt
│ │ ├── kv1_cc.txt
│ │ ├── kv1kv2.cogroup.txt
│ │ ├── kv2.txt
│ │ ├── kv3.txt
│ │ ├── kv4.txt
│ │ ├── kv5.txt
│ │ ├── kv6.txt
│ │ ├── lt100.sorted.txt
│ │ ├── lt100.txt
│ │ ├── lt100.txt.deflate
│ │ ├── ml-data
│ │ │ ├── README
│ │ │ ├── allbut.pl
│ │ │ ├── mku.sh
│ │ │ ├── u.data
│ │ │ ├── u.genre
│ │ │ ├── u.info
│ │ │ ├── u.item
│ │ │ ├── u.occupation
│ │ │ ├── u.user
│ │ │ ├── u1.base
│ │ │ ├── u1.test
│ │ │ ├── u2.base
│ │ │ ├── u2.test
│ │ │ ├── u3.base
│ │ │ ├── u3.test
│ │ │ ├── u4.base
│ │ │ ├── u4.test
│ │ │ ├── u5.base
│ │ │ ├── u5.test
│ │ │ ├── ua.base
│ │ │ ├── ua.test
│ │ │ ├── ub.base
│ │ │ └── ub.test
│ │ ├── null.txt
│ │ ├── nullfile.txt
│ │ ├── sample-queryplan-in-history.txt
│ │ ├── sample-queryplan.txt
│ │ ├── smb_bucket_input.rc
│ │ ├── smb_bucket_input.txt
│ │ ├── smbbucket_1.rc
│ │ ├── smbbucket_1.txt
│ │ ├── smbbucket_2.rc
│ │ ├── smbbucket_2.txt
│ │ ├── smbbucket_3.rc
│ │ ├── smbbucket_3.txt
│ │ ├── source.txt
│ │ ├── srcbucket0.txt
│ │ ├── srcbucket1.txt
│ │ ├── srcbucket20.txt
│ │ ├── srcbucket21.txt
│ │ ├── srcbucket22.txt
│ │ ├── srcbucket23.txt
│ │ ├── string.txt
│ │ ├── symlink1.txt
│ │ ├── symlink2.txt
│ │ ├── test.dat
│ │ ├── text-en.txt
│ │ └── union_input.txt
│ │ └── test.hql
│ ├── oozie
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ └── groovy
│ │ └── com
│ │ └── cloudera
│ │ └── itest
│ │ └── ooziesmoke
│ │ └── TestOozieSmoke.groovy
│ ├── package
│ ├── pom.xml
│ └── src
│ │ └── test
│ │ ├── groovy
│ │ └── com
│ │ │ └── cloudera
│ │ │ └── itest
│ │ │ └── packagesmoke
│ │ │ ├── CDHServices.groovy
│ │ │ ├── CDHUpgradeSequence.groovy
│ │ │ ├── DeployCDH.groovy
│ │ │ ├── PackageTestCommon.groovy
│ │ │ ├── PackageTestErrorProxy.java
│ │ │ ├── PackageTestMatchers.java
│ │ │ ├── PackageTestRepoMgr.groovy
│ │ │ ├── StateVerifier.groovy
│ │ │ ├── StateVerifierFlume.groovy
│ │ │ ├── StateVerifierHBase.groovy
│ │ │ ├── StateVerifierHDFS.groovy
│ │ │ ├── StateVerifierHue.groovy
│ │ │ ├── StateVerifierMapreduce.groovy
│ │ │ ├── StateVerifierOozie.groovy
│ │ │ ├── StateVerifierSqoop.groovy
│ │ │ ├── StateVerifierZookeeper.groovy
│ │ │ ├── TestPackagesPseudoDistributed.groovy
│ │ │ ├── TestPackagesPseudoDistributedState.groovy
│ │ │ ├── TestPackagesPseudoDistributedUpgrade.groovy
│ │ │ ├── TestPackagesReadiness.groovy
│ │ │ ├── TestPackagesSingleNode.groovy
│ │ │ ├── TestServices.groovy
│ │ │ ├── TestServicesCreateState.groovy
│ │ │ ├── TestServicesCreateStateMissing.groovy
│ │ │ └── TestServicesVerifyState.groovy
│ │ └── resources
│ │ ├── package_data_apt.xml
│ │ ├── package_data_yum.xml
│ │ └── package_data_zypper.xml
│ └── pom.xml
└── suites
├── README
├── common
└── pom.xml
├── conf
├── log4j.configuration
├── pom.xml
└── src
│ └── main
│ └── resources
│ └── com.cloudera.itest.log4j.configuration
├── integration
├── pom.xml
└── sqoop
│ └── pom.xml
├── package
└── pom.xml
└── smokes
├── flume
└── pom.xml
├── hadoop
└── pom.xml
├── hbase
└── pom.xml
├── hive
└── pom.xml
├── oozie
└── pom.xml
├── pig
└── pom.xml
├── pom.xml
└── sqoop
└── pom.xml
/DEVNOTES:
--------------------------------------------------------------------------------
1 | 1. Debian
2 | 1.0. apt-get install -y git subversion build-essential dh-make debhelper devscripts ant ant-optional autoconf automake liblzo2-dev libz-dev sharutils libfuse-dev reprepro
3 |
4 | 2. RedHat
5 | 2.0 yum install -y git subversion fuse-devel fuse fuse-libs
6 |
--------------------------------------------------------------------------------
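The packages listed in DEVNOTES are the build-time prerequisites for the Debian and RPM packaging targets driven by the Makefile below. One quick way to confirm the key tools are present (a sketch; adjust package names to your distribution):

    # Debian/Ubuntu: check the state of a few required packages
    dpkg -l debhelper devscripts ant autoconf automake

    # RedHat/CentOS: confirm the FUSE bits are installed
    rpm -q fuse fuse-libs fuse-devel
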
/Makefile:
--------------------------------------------------------------------------------
1 | BASE_DIR :=$(shell pwd)
2 | BUILD_DIR ?=$(BASE_DIR)/build
3 | DL_DIR ?=$(BASE_DIR)/dl
4 | OUTPUT_DIR?=$(BASE_DIR)/output
5 | REPO_DIR ?=$(BASE_DIR)/src
6 |
7 | REQUIRED_DIRS = $(BUILD_DIR) $(DL_DIR) $(OUTPUT_DIR)
8 | _MKDIRS :=$(shell for d in $(REQUIRED_DIRS); \
9 | do \
10 | [ -d $$d ] || mkdir -p $$d; \
11 | done)
12 |
13 | TARGETS:=
14 | TARGETS_HELP:=
15 | TARGETS_CLEAN:=
16 |
17 |
18 | # Default Apache mirror
19 | APACHE_MIRROR ?= http://mirrors.ibiblio.org/apache
20 | CLOUDERA_ARCHIVE ?= http://archive.cloudera.com/tarballs/
21 |
22 | # Include the implicit rules and functions for building packages
23 | include package.mk
24 | include bigtop.mk
25 |
26 | help: package-help
27 |
28 | all: srpm sdeb
29 | world: all
30 |
31 | packages: $(TARGETS)
32 |
33 | help-header:
34 | @echo " targets:"
35 | @echo " all (all TGZs/SRPMS/SDEBS)"
36 | @echo " srpm (all SRPMs)"
37 | @echo " rpm (all RPMs)"
38 | @echo " sdeb (all SDEBs)"
39 | @echo " deb (all DEBs)"
40 | @echo " clean (remove build/output dirs)"
41 | @echo " realclean (remove build/output/dl dirs)"
42 |
43 | package-help: help-header $(TARGETS_HELP)
44 |
45 | clean: $(TARGETS_CLEAN)
46 | -rm -rf $(BUILD_DIR)
47 | -rm -rf $(OUTPUT_DIR)
48 |
49 | realclean: clean
50 | -rm -rf $(DL_DIR)
51 |
52 | srpm: $(TARGETS_SRPM)
53 |
54 | rpm: $(TARGETS_RPM)
55 |
56 | yum: $(TARGETS_YUM)
57 |
58 | apt: $(TARGETS_APT)
59 |
60 | sdeb: $(TARGETS_SDEB)
61 |
62 | deb: $(TARGETS_DEB)
63 |
64 | relnotes: $(TARGETS_RELNOTES)
65 |
66 | checkenv:
67 | ./check-env.sh
68 |
69 | .DEFAULT_GOAL:= help
70 | .PHONY: clean package-help help-header packages all world help srpm sdeb
71 |
--------------------------------------------------------------------------------
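The Makefile is the single entry point for packaging: srpm/sdeb build source packages, rpm/deb build binary packages, and checkenv runs check-env.sh. Typical invocations (the directory overrides are optional, since every *_DIR variable is assigned with ?=):

    make checkenv                 # verify JAVA_HOME, JAVA5_HOME, FORREST_HOME
    make srpm sdeb                # build all source RPMs and source DEBs
    make rpm                      # build binary RPMs
    make BUILD_DIR=/tmp/bigtop-build DL_DIR=/tmp/bigtop-dl srpm
    make realclean                # remove build/, output/ and dl/
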
/NOTICE:
--------------------------------------------------------------------------------
1 | This product includes software developed by Cloudera, Inc.
2 | (http://www.cloudera.com/).
3 |
4 |
--------------------------------------------------------------------------------
/check-env.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | if [ -z "$JAVA_HOME" ]; then
4 | echo JAVA_HOME is not set
5 | exit 1
6 | fi
7 |
8 | if [ -z "$JAVA5_HOME" ]; then
9 | echo JAVA5_HOME is not set
10 | exit 1
11 | fi
12 |
13 | if [ -z "$FORREST_HOME" ]; then
14 | echo FORREST_HOME is not set
15 | exit 1
16 | fi
17 |
18 | echo Found all necessary env variables.
19 | exit 0
20 |
--------------------------------------------------------------------------------
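check-env.sh only verifies that the three environment variables needed by the Hadoop build are set; it does not validate the paths. A minimal sketch of preparing the environment before a build (the paths below are placeholders, not values shipped by this repository):

    export JAVA_HOME=/usr/lib/jvm/java-6-sun          # placeholder path
    export JAVA5_HOME=/usr/lib/jvm/java-1.5.0-sun     # placeholder path
    export FORREST_HOME=/opt/apache-forrest           # placeholder path
    ./check-env.sh && make srpm
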
/docs/logo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/docs/logo.jpg
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.pseudo/README:
--------------------------------------------------------------------------------
1 | The conf.pseudo example configuration is for a pseudo-distributed cluster.
2 |
3 | A pseudo-distributed cluster is one in which all of the Hadoop daemons
4 | run separately, but on a single node.
5 |
6 | This is a good mode for developers to install on their own machines to
7 | ensure that their jobs will operate correctly on a real cluster.
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.pseudo/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>fs.default.name</name>
7 |     <value>hdfs://localhost:8020</value>
8 |   </property>
9 |
10 |   <property>
11 |     <name>hadoop.tmp.dir</name>
12 |     <value>/var/lib/hadoop/cache/${user.name}</value>
13 |   </property>
14 |
15 |   <property>
16 |     <name>hadoop.proxyuser.oozie.hosts</name>
17 |     <value>*</value>
18 |   </property>
19 |   <property>
20 |     <name>hadoop.proxyuser.oozie.groups</name>
21 |     <value>*</value>
22 |   </property>
23 | </configuration>
--------------------------------------------------------------------------------
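With this configuration active as /etc/hadoop/conf and the pseudo-distributed daemons running, the default filesystem is the local NameNode on port 8020. A quick smoke test, assuming the hadoop client is on the PATH:

    hadoop fs -ls /                               # uses fs.default.name from core-site.xml
    hadoop fs -fs hdfs://localhost:8020 -ls /     # same listing, with the URI given explicitly
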
/src/pkg/common/hadoop/conf.pseudo/hdfs-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>dfs.replication</name>
7 |     <value>1</value>
8 |   </property>
9 |   <property>
10 |     <name>dfs.permissions</name>
11 |     <value>false</value>
12 |   </property>
13 |   <property>
14 |     <name>dfs.name.dir</name>
15 |     <value>/var/lib/hadoop/cache/hadoop/dfs/name</value>
16 |   </property>
17 |
18 |   <property>
19 |     <name>dfs.namenode.plugins</name>
20 |     <value>org.apache.hadoop.thriftfs.NamenodePlugin</value>
21 |     <description>Comma-separated list of namenode plug-ins to be activated.</description>
22 |   </property>
23 |   <property>
24 |     <name>dfs.datanode.plugins</name>
25 |     <value>org.apache.hadoop.thriftfs.DatanodePlugin</value>
26 |     <description>Comma-separated list of datanode plug-ins to be activated.</description>
27 |   </property>
28 |   <property>
29 |     <name>dfs.thrift.address</name>
30 |     <value>0.0.0.0:10090</value>
31 |   </property>
32 | </configuration>
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.pseudo/mapred-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>mapred.job.tracker</name>
7 |     <value>localhost:8021</value>
8 |   </property>
9 |
10 |   <property>
11 |     <name>mapred.jobtracker.plugins</name>
12 |     <value>org.apache.hadoop.thriftfs.ThriftJobTrackerPlugin</value>
13 |     <description>Comma-separated list of jobtracker plug-ins to be activated.</description>
14 |   </property>
15 |   <property>
16 |     <name>jobtracker.thrift.address</name>
17 |     <value>0.0.0.0:9290</value>
18 |   </property>
19 | </configuration>
--------------------------------------------------------------------------------
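mapred.job.tracker points clients at the JobTracker on localhost:8021, while the Thrift plugin and address entries exist for the Hue interfaces. A simple end-to-end check is to submit one of the bundled example jobs; the examples jar name and location vary between releases, so the path below is an assumption:

    hadoop jar /usr/lib/hadoop/hadoop-examples.jar pi 2 100    # jar path is an assumption; adjust to your install
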
/src/pkg/common/hadoop/conf.secure/configuration.xsl:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
3 | <xsl:output method="html"/>
4 | <xsl:template match="configuration">
5 | <html>
6 | <body>
7 | <table border="1">
8 | <tr>
9 |  <td>name</td>
10 |  <td>value</td>
11 |  <td>description</td>
12 | </tr>
13 | <xsl:for-each select="property">
14 | <tr>
15 |   <td><a name="{name}"><xsl:value-of select="name"/></a></td>
16 |   <td><xsl:value-of select="value"/></td>
17 |   <td><xsl:value-of select="description"/></td>
18 | </tr>
19 | </xsl:for-each>
20 | </table>
21 | </body>
22 | </html>
23 | </xsl:template>
24 | </xsl:stylesheet>
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.secure/hadoop-metrics.properties:
--------------------------------------------------------------------------------
1 | # Configuration of the "dfs" context for null
2 | dfs.class=org.apache.hadoop.metrics.spi.NullContext
3 |
4 | # Configuration of the "dfs" context for file
5 | #dfs.class=org.apache.hadoop.metrics.file.FileContext
6 | #dfs.period=10
7 | #dfs.fileName=/tmp/dfsmetrics.log
8 |
9 | # Configuration of the "dfs" context for ganglia
10 | # dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
11 | # dfs.period=10
12 | # dfs.servers=localhost:8649
13 |
14 |
15 | # Configuration of the "mapred" context for null
16 | mapred.class=org.apache.hadoop.metrics.spi.NullContext
17 |
18 | # Configuration of the "mapred" context for file
19 | #mapred.class=org.apache.hadoop.metrics.file.FileContext
20 | #mapred.period=10
21 | #mapred.fileName=/tmp/mrmetrics.log
22 |
23 | # Configuration of the "mapred" context for ganglia
24 | # mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
25 | # mapred.period=10
26 | # mapred.servers=localhost:8649
27 |
28 |
29 | # Configuration of the "jvm" context for null
30 | jvm.class=org.apache.hadoop.metrics.spi.NullContext
31 |
32 | # Configuration of the "jvm" context for file
33 | #jvm.class=org.apache.hadoop.metrics.file.FileContext
34 | #jvm.period=10
35 | #jvm.fileName=/tmp/jvmmetrics.log
36 |
37 | # Configuration of the "jvm" context for ganglia
38 | # jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
39 | # jvm.period=10
40 | # jvm.servers=localhost:8649
41 |
42 |
43 | # Configuration of the "ugi" context for null
44 | ugi.class=org.apache.hadoop.metrics.spi.NullContext
45 |
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.secure/masters:
--------------------------------------------------------------------------------
1 | localhost
2 |
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.secure/slaves:
--------------------------------------------------------------------------------
1 | localhost
2 |
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/conf.secure/taskcontroller.cfg:
--------------------------------------------------------------------------------
1 | mapred.local.dir=/var/lib/hadoop/cache/mapred/local
2 | mapreduce.tasktracker.group=mapred
3 | hadoop.log.dir=/var/log/hadoop
4 |
--------------------------------------------------------------------------------
/src/pkg/common/hadoop/hadoop.default:
--------------------------------------------------------------------------------
1 | export HADOOP_HOME=/usr/lib/hadoop
2 | export HADOOP_NAMENODE_USER=hdfs
3 | export HADOOP_SECONDARYNAMENODE_USER=hdfs
4 | export HADOOP_DATANODE_USER=hdfs
5 | export HADOOP_JOBTRACKER_USER=mapred
6 | export HADOOP_TASKTRACKER_USER=mapred
7 | export HADOOP_IDENT_STRING=hadoop
8 | export HADOOP_PID_DIR=/var/run/hadoop
9 |
--------------------------------------------------------------------------------
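hadoop.default is installed as /etc/default/hadoop (note the /etc/default entries in hadoop.dirs and hadoop.install below) so that the init scripts and wrappers pick up the per-daemon users and the PID directory. The conventional consumption pattern, sketched here rather than copied from the packaged init scripts, is:

    # inside an init script or wrapper
    [ -f /etc/default/hadoop ] && . /etc/default/hadoop
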
/src/pkg/common/hive/hadoop-hive-metastore.default:
--------------------------------------------------------------------------------
1 | # The port for the Hive metastore daemon to listen on.
2 | # Unfortunately, there is no way to specify the interfaces
3 | # to which the daemon binds.
4 | #
5 | #PORT=
6 |
--------------------------------------------------------------------------------
/src/pkg/common/hive/hadoop-hive-server.default:
--------------------------------------------------------------------------------
1 | # The port for the Hive server daemon to listen on.
2 | # Unfortunately, there is no way to specify the interfaces
3 | # to which the daemon binds.
4 | #
5 | #PORT=
6 |
--------------------------------------------------------------------------------
/src/pkg/common/hive/hive-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |
6 |   <property>
7 |     <name>javax.jdo.option.ConnectionURL</name>
8 |     <value>jdbc:derby:;databaseName=/var/lib/hive/metastore/metastore_db;create=true</value>
9 |     <description>JDBC connect string for a JDBC metastore</description>
10 |   </property>
11 |
12 |   <property>
13 |     <name>javax.jdo.option.ConnectionDriverName</name>
14 |     <value>org.apache.derby.jdbc.EmbeddedDriver</value>
15 |     <description>Driver class name for a JDBC metastore</description>
16 |   </property>
17 |
18 |   <property>
19 |     <name>hive.hwi.war.file</name>
20 |     <value>/usr/lib/hive/lib/hive-hwi-0.7.0-cdh3u1-SNAPSHOT.war</value>
21 |     <description>This is the WAR file with the jsp content for Hive Web Interface</description>
22 |   </property>
23 |
24 | </configuration>
--------------------------------------------------------------------------------
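This hive-site.xml keeps the metastore in an embedded Derby database under /var/lib/hive/metastore, created on first use because of create=true in the JDBC URL. A simple way to exercise it, assuming the Hive packages are installed and the invoking user can write to that directory:

    hive -e 'SHOW TABLES;'    # first run creates /var/lib/hive/metastore/metastore_db
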
/src/pkg/common/oozie/oozie-env.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | #
3 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License. See accompanying LICENSE file.
15 | #
16 |
17 | export OOZIE_CONFIG=/etc/oozie
18 | export OOZIE_DATA=/var/lib/oozie
19 | export OOZIE_LOG=/var/log/oozie
20 | export CATALINA_BASE=${OOZIE_DATA}/oozie-server
21 | export CATALINA_TMPDIR=/var/tmp/oozie
22 | export CATALINA_PID=/var/run/oozie/oozie.pid
23 |
24 |
--------------------------------------------------------------------------------
/src/pkg/common/pig/log4j.properties:
--------------------------------------------------------------------------------
1 | # ***** Set the org.apache.pig logger level to INFO and its only appender to A.
2 | log4j.logger.org.apache.pig=info, A
3 |
4 | # ***** A is set to be a ConsoleAppender.
5 | log4j.appender.A=org.apache.log4j.ConsoleAppender
6 | # ***** A uses PatternLayout.
7 | log4j.appender.A.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
9 |
--------------------------------------------------------------------------------
/src/pkg/common/pig/pig.properties:
--------------------------------------------------------------------------------
1 | # Pig configuration file. All values can be overwritten by command line arguments.
2 | # see bin/pig -help
3 |
4 | # log4jconf log4j configuration file
5 | # log4jconf=./conf/log4j.properties
6 |
7 | # brief logging (no timestamps)
8 | brief=false
9 |
10 | # clustername, name of the hadoop jobtracker. If no port is defined port 50020 will be used.
11 | #cluster
12 |
13 | #debug level, INFO is default
14 | debug=INFO
15 |
16 | # a file that contains pig script
17 | #file=
18 |
19 | # load jarfile, colon separated
20 | #jar=
21 |
22 | #verbose print all log messages to screen (default to print only INFO and above to screen)
23 | verbose=false
24 |
25 | #exectype local|mapreduce, mapreduce is default
26 | #exectype=mapreduce
27 | # hod related properties
28 | #ssh.gateway
29 | #hod.expect.root
30 | #hod.expect.uselatest
31 | #hod.command
32 | #hod.config.dir
33 | #hod.param
34 |
35 |
36 | #Do not spill temp files smaller than this size (bytes)
37 | pig.spill.size.threshold=5000000
38 | #EXPERIMENT: Activate garbage collection when spilling a file bigger than this size (bytes)
39 | #This should help reduce the number of files being spilled.
40 | pig.spill.gc.activation.size=40000000
41 |
42 |
43 | ######################
44 | # Everything below this line is Yahoo specific. Note that I've made
45 | # (almost) no changes to the lines above to make merging in from Apache
46 | # easier. Any values I don't want from above I override below.
47 | #
48 | # This file is configured for use with HOD on the production clusters. If you
49 | # want to run pig with a static cluster you will need to remove everything
50 | # below this line and set the cluster value (above) to the
51 | # hostname and port of your job tracker.
52 |
53 | exectype=mapreduce
54 | log.file=
55 |
--------------------------------------------------------------------------------
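Everything in pig.properties can be overridden at invocation time, as the header notes (see bin/pig -help for the full list of switches). For example, to run a script with the local execution engine instead of the exectype=mapreduce default set above (the script name is illustrative):

    pig -x local myscript.pig
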
/src/pkg/common/zookeeper/zookeeper.1:
--------------------------------------------------------------------------------
1 | .\" Process this file with
2 | .\" groff -man -Tascii zookeeper.1
3 | .\"
4 | .TH zookeeper 1 "November 2010 " Linux "User Manuals"
5 |
6 | .SH NAME
7 | \fBzookeeper\fR \- a high-performance coordination service for distributed applications
8 |
9 | .SH SYNOPSIS
10 |
11 | .B zookeeper-client
12 | [-server host:port(,host:port)*] COMMAND [OPTIONS]
13 | .RS 0
14 | .B zookeeper-server
15 |
16 |
17 | .SH DESCRIPTION
18 |
19 | Apache ZooKeeper is a centralized service for maintaining configuration information,
20 | naming, providing distributed synchronization, and providing group services.
21 | All of these kinds of services are used in some form or another by distributed
22 | applications. Each time they are implemented there is a lot of work that goes
23 | into fixing the bugs and race conditions that are inevitable. Because of the
24 | difficulty of implementing these kinds of services, applications initially
25 | usually skimp on them, which makes them brittle in the presence of change and
26 | difficult to manage. Even when done correctly, different implementations of
27 | these services lead to management complexity when the applications are deployed.
28 |
29 | For more information about ZooKeeper, see
30 | .RS 0
31 | http://hadoop.apache.org/zookeeper/.
32 |
33 | To find the list of available commands and options for \fBzookeeper-client\fR type:
34 | \fBzookeeper-client help\fR
35 |
36 | .SH ENVIRONMENT
37 |
38 | .IP ZOOKEEPER_HOME
39 | Alternate home directory. Default is /usr/lib/zookeeper.
40 |
41 | .IP ZOOKEEPER_CONF
42 | Alternate conf dir. Default is /etc/zookeeper.
43 |
44 | .IP ZOOPIDFILE
45 | Alternate server PID file. Default is /var/run/zookeeper/zookeeper-server.pid.
46 |
47 | .SH COPYRIGHT
48 | Copyright (C) 2010 The Apache Software Foundation. All rights reserved.
--------------------------------------------------------------------------------
/src/pkg/deb/flume/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/control:
--------------------------------------------------------------------------------
1 | Source: flume
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Alex Newman
5 | Build-Depends: debhelper (>= 6), ant, sun-java6-jdk, ant-optional, git-core
6 | Standards-Version: 3.8.0
7 | Homepage: http://www.cloudera.com
8 |
9 | Package: flume
10 | Architecture: all
11 | Depends: sun-java6-jre, hadoop-zookeeper, adduser, hadoop
12 | Description: reliable, scalable, and manageable distributed data collection application
13 | Flume is a reliable, scalable, and manageable distributed data collection
14 | application for collecting data such as logs and delivering it to data stores
15 | such as Hadoop's HDFS. It can efficiently collect, aggregate, and move large
16 | amounts of log data. It has a simple, but flexible, architecture based on
17 | streaming data flows. It is robust and fault tolerant with tunable reliability
18 | mechanisms and many failover and recovery mechanisms. The system is centrally
19 | managed and allows for intelligent dynamic management. It uses a simple
20 | extensible data model that allows for online analytic applications.
21 |
22 | Package: flume-master
23 | Architecture: all
24 | Depends: flume (= ${source:Version})
25 | Description: central administration point for the flume data collection system
26 | The Flume master daemon is the central administration and data path control
27 | point for flume nodes.
28 |
29 | Package: flume-node
30 | Architecture: all
31 | Depends: flume (= ${source:Version})
32 | Description: core element of Flume's data path that collects and delivers data
33 | The Flume node daemon is a core element of flume's data path and is
34 | responsible for generating, processing, and delivering data.
35 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Alex Newman on
2 | Mon, 22 Feb 2010 23:07:14 -0800
3 |
4 | The Cloudera Distribution for Hadoop is from
5 |
6 | License:
7 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
8 |
9 | This Debian Package contains the Cloudera Distribution for Hadoop.
10 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
11 | the Apache 2.0 License as above. See http://cloudera.com/distribution
12 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/flume.docs:
--------------------------------------------------------------------------------
1 | DEVNOTES
2 | NOTICE
3 | RELEASENOTES
4 | README
5 | LICENSE
6 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/flume.manpages:
--------------------------------------------------------------------------------
1 | docs/man/flume.1
2 | docs/man/flume-master.1
3 | docs/man/flume-node.1
4 | docs/man/flume-shell.1
--------------------------------------------------------------------------------
/src/pkg/deb/flume/flume.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for flume
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | # Install config alternatives
9 | update-alternatives --install /etc/flume/conf flume-conf /etc/flume/conf.empty 30
10 |
11 | ;;
12 |
13 | abort-upgrade|abort-remove|abort-deconfigure)
14 | ;;
15 |
16 | *)
17 | echo "postinst called with unknown argument \`$1'" >&2
18 | exit 1
19 | ;;
20 | esac
21 |
22 | #DEBHELPER#
23 |
--------------------------------------------------------------------------------
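The postinst registers /etc/flume/conf.empty as the flume-conf alternative at priority 30, so /etc/flume/conf resolves there until something with a higher priority is installed. To inspect or repoint the alternative later (conf.mycluster is a hypothetical directory used only for illustration):

    update-alternatives --display flume-conf
    update-alternatives --install /etc/flume/conf flume-conf /etc/flume/conf.mycluster 50
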
/src/pkg/deb/flume/flume.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for flume
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | if ! getent passwd flume >/dev/null; then
20 | # Adding system user: flume .
21 | adduser \
22 | --system \
23 | --group \
24 | --home /var/run/flume \
25 | --gecos "Flume User" \
26 | --shell /bin/false \
27 | flume >/dev/null
28 | fi
29 | install -d -m 0755 -o flume -g flume /var/run/flume
30 | install -d -m 0755 -o flume -g flume /var/log/flume
31 | ;;
32 |
33 | abort-upgrade)
34 | ;;
35 |
36 | *)
37 | echo "preinst called with unknown argument \`$1'" >&2
38 | exit 1
39 | ;;
40 | esac
41 |
42 | # dh_installdeb will replace this with shell code automatically
43 | # generated by other debhelper scripts.
44 |
45 | #DEBHELPER#
46 |
47 | exit 0
48 |
--------------------------------------------------------------------------------
/src/pkg/deb/flume/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | build_version=${FLUME_VERSION}
11 |
12 | patch: patch-stamp
13 | patch-stamp:
14 | touch $@
15 |
16 | clean:
17 | dh_testdir
18 | dh_testroot
19 | rm -f *-stamp
20 | dh_clean
21 | rm -Rf debian/tmp debian/flume
22 | find debian -name .\*swp -exec rm -f {} \;
23 |
24 | build-indep: build-indep-stamp
25 | build-indep-stamp: patch-stamp
26 | # Temporarily disabled - we'll just use the build from the tarball.
27 | ant -Dversion=$(build_version) -Divy.home=`pwd`/debian/.ivy tar
28 | mkdir -p debian/tmp
29 | tar cf - --exclude=debian/\* . | (cd debian/tmp && tar xf -)
30 | touch $@
31 |
32 | install: install-indep
33 | install-indep:
34 | dh_testdir
35 | dh_testroot
36 | # ant package
37 | sh -x debian/install_flume.sh \
38 | --build-dir=. \
39 | --prefix=debian/flume
40 | # Remove hadoop dependency and instead symlink in the hadoop jar
41 | rm debian/flume/usr/lib/flume/lib/hadoop-core-*
42 | ln -s /usr/lib/hadoop/hadoop-core.jar debian/flume/usr/lib/flume/lib/hadoop-core.jar
43 | dh_install -i
44 |
45 | binary-common:
46 | dh_testdir
47 | dh_testroot
48 | dh_installchangelogs
49 | dh_installdocs
50 | dh_installman
51 | dh_link
52 | dh_strip
53 | dh_compress
54 | dh_fixperms
55 | dh_makeshlibs
56 | dh_installinit
57 | dh_installdeb
58 | dh_shlibdeps
59 | dh_gencontrol
60 | dh_md5sums
61 | dh_builddeb
62 |
63 | binary-indep: build-indep install-indep
64 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
65 |
66 | binary-arch:
67 | binary: binary-indep
68 | .PHONY: build clean binary-indep binary install-indep binary-arch
69 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/append_licenses.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | #
3 | # Copyright 2009 Cloudera, Inc.
4 | #
5 | # Debian prefers all license information to go in /usr/share/doc/copyright
6 | # rather than individual files in /usr/lib/hadoop. This script appends all
7 | # the licenses to the target file and removes the originals.
8 |
9 | set -e
10 |
11 | is_apache_2() {
12 | head $1 | perl -n0 -e 'exit(!(m/Apache/ && m/Version 2\.0/))'
13 | }
14 |
15 | out_file=debian/hadoop/usr/share/doc/hadoop/copyright
16 |
17 | for license in `find debian/hadoop/usr/lib/hadoop/ -name \*LICENSE.txt` ; do
18 | (echo
19 | echo -------------
20 | echo Included license: $(basename $license)
21 | echo -------------
22 | echo
23 | # Check if it's apache 2.0, since lintian gets grumpy if you include
24 | # the full text
25 | if is_apache_2 $license ; then
26 | echo 'Apache 2.0 License - see /usr/share/common-licenses/Apache-2.0'
27 | else
28 | cat $license
29 | fi
30 | ) >> $out_file
31 | rm $license
32 | done
33 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/changelog:
--------------------------------------------------------------------------------
1 | -- NOTE: this gets auto-generated
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Todd Lipcon on
2 | Tue, 24 Mar 2009 00:33:33 -0400.
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/dirs:
--------------------------------------------------------------------------------
1 | usr/bin
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/docs:
--------------------------------------------------------------------------------
1 | CHANGES.txt
2 | LICENSE.txt
3 | NOTICE.txt
4 | README.txt
5 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-conf-pseudo.install:
--------------------------------------------------------------------------------
1 | /etc/hadoop/conf.pseudo
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-conf-pseudo.lintian-overrides:
--------------------------------------------------------------------------------
1 | hadoop-conf-pseudo: new-package-should-close-itp-bug
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-conf-pseudo.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `configure'
10 | # * `abort-upgrade'
11 | # * `abort-remove' `in-favour'
12 | #
13 | # * `abort-remove'
14 | # * `abort-deconfigure' `in-favour'
15 | # `removing'
16 | #
17 | # for details, see http://www.debian.org/doc/debian-policy/ or
18 | # the debian-policy package
19 |
20 |
21 | case "$1" in
22 | configure)
23 | update-alternatives --install /etc/hadoop/conf hadoop-conf /etc/hadoop/conf.pseudo 30
24 | if [ ! -e /etc/hadoop/conf ]; then
25 | ln -s /etc/hadoop/conf.pseudo /etc/hadoop/conf
26 | fi
27 | nn_dfs_dir="/var/lib/hadoop/cache/hadoop/dfs"
28 | if [ -z "$(ls -A $nn_dfs_dir/name 2>/dev/null)" ]; then
29 | mkdir -p $nn_dfs_dir
30 | chown hdfs:hadoop $nn_dfs_dir
31 | su -s /bin/sh hdfs -c \
32 | 'hadoop --config /etc/hadoop/conf.pseudo namenode -format' 2>/dev/null 1>/dev/null || :
33 | fi
34 | ;;
35 |
36 | abort-upgrade|abort-remove|abort-deconfigure)
37 | ;;
38 |
39 | *)
40 | echo "postinst called with unknown argument \`$1'" >&2
41 | exit 1
42 | ;;
43 | esac
44 |
45 | # dh_installdeb will replace this with shell code automatically
46 | # generated by other debhelper scripts.
47 |
48 | #DEBHELPER#
49 |
50 | exit 0
51 |
52 |
53 |
--------------------------------------------------------------------------------
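Installing hadoop-conf-pseudo registers /etc/hadoop/conf.pseudo at priority 30, which outranks the conf.empty alternative that the base hadoop package registers at priority 10 (see hadoop.postinst below), and it formats the NameNode directory once if it is empty. To see which configuration is active, or to switch back by hand:

    update-alternatives --display hadoop-conf
    update-alternatives --set hadoop-conf /etc/hadoop/conf.empty
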
/src/pkg/deb/hadoop/hadoop-conf-pseudo.prerm:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # prerm script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `upgrade'
11 | # * `failed-upgrade'
12 | # * `remove' `in-favour'
13 | # * `deconfigure' `in-favour'
14 | # `removing'
15 | #
16 | # for details, see http://www.debian.org/doc/debian-policy/ or
17 | # the debian-policy package
18 |
19 |
20 | case "$1" in
21 | remove|upgrade|deconfigure)
22 | update-alternatives --remove hadoop-conf /etc/hadoop/conf.pseudo > /dev/null || exit 1
23 | rm -f /etc/hadoop/conf
24 | ;;
25 |
26 | failed-upgrade)
27 | ;;
28 |
29 | *)
30 | echo "prerm called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | # dh_installdeb will replace this with shell code automatically
36 | # generated by other debhelper scripts.
37 |
38 | #DEBHELPER#
39 |
40 | exit 0
41 |
42 |
43 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-doc.dirs:
--------------------------------------------------------------------------------
1 | /usr/share/doc/hadoop-doc/
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-doc.install:
--------------------------------------------------------------------------------
1 | #DOCS#
2 | /usr/share/doc/hadoop-doc
3 |
4 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-doc.lintian-overrides:
--------------------------------------------------------------------------------
1 | hadoop-doc: embedded-javascript-library usr/share/doc/hadoop-doc/cn/skin/prototype.js.gz
2 | hadoop-doc: embedded-javascript-library usr/share/doc/hadoop-doc/skin/prototype.js.gz
3 | hadoop-doc: new-package-should-close-itp-bug
4 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-fuse.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib/hadoop/contrib/fuse-dfs
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-fuse.install:
--------------------------------------------------------------------------------
1 | /usr/bin/hadoop-fuse-dfs
2 | /usr/lib/hadoop/bin/fuse_dfs
3 | /usr/lib/hadoop/contrib/fuse-dfs
4 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-fuse.lintian-overrides:
--------------------------------------------------------------------------------
1 | hadoop-fuse: new-package-should-close-itp-bug
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-fuse.manpages:
--------------------------------------------------------------------------------
1 | debian/hadoop-fuse-dfs.1
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-native.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib/hadoop/lib/native/
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-native.install:
--------------------------------------------------------------------------------
1 | /usr/lib/hadoop/lib/native
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-native.lintian-overrides:
--------------------------------------------------------------------------------
1 | hadoop-native: new-package-should-close-itp-bug
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-pipes.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib
2 | /usr/include
3 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-pipes.install:
--------------------------------------------------------------------------------
1 | /usr/lib/libhadooputils.a
2 | /usr/lib/libhadooppipes.a
3 | /usr/include/hadoop
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop-source.install:
--------------------------------------------------------------------------------
1 | /usr/src/hadoop
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib/hadoop
2 | /var/log/hadoop
3 | /usr/bin
4 | /var/run/hadoop
5 | /var/lib/hadoop
6 | /var/lib/hadoop/cache/hadoop
7 | /etc/default
8 | /usr/share/doc/hadoop
9 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.install:
--------------------------------------------------------------------------------
1 | /usr/bin
2 | /usr/lib/hadoop
3 | /etc/hadoop/conf.empty
4 | /etc/default
5 | /usr/share/doc/hadoop
6 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.lintian-overrides:
--------------------------------------------------------------------------------
1 | hadoop: extra-license-file usr/lib/hadoop/LICENSE.txt
2 | hadoop: new-package-should-close-itp-bug
3 | hadoop: shell-script-fails-syntax-check ./usr/lib/hadoop/contrib/hod/bin/hod
4 | hadoop: shell-script-fails-syntax-check ./usr/lib/hadoop/contrib/hod/bin/hodcleanup
5 | hadoop: shell-script-fails-syntax-check ./usr/lib/hadoop/contrib/hod/bin/hodring
6 | hadoop: shell-script-fails-syntax-check ./usr/lib/hadoop/contrib/hod/bin/ringmaster
7 | hadoop: shell-script-fails-syntax-check ./usr/lib/hadoop/contrib/hod/support/logcondense.py
8 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.manpages:
--------------------------------------------------------------------------------
1 | debian/hadoop.1
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hadoop
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | # We used to chown /usr/lib/hadoop here, but with security we don't want to do that -
9 | # things like task-controller are very particular about ownership
10 | chown -R root:hadoop /etc/hadoop
11 | chgrp -R hadoop /var/log/hadoop /var/run/hadoop
12 | chmod g+w /var/run/hadoop /var/log/hadoop
13 |
14 | # Change the ownership of old logs so that we don't fail rotation on next startup
15 | find /var/log/hadoop/ | egrep 'jobtracker|tasktracker|userlogs|history' | xargs --no-run-if-empty chown mapred
16 | find /var/log/hadoop/ | egrep 'namenode|datanode' | xargs --no-run-if-empty chown hdfs
17 |
18 | # We don't want to do this recursively since we may be reinstalling, in which case
19 | # users have their own cache/ directories which shouldn't be stolen
20 | chown root:hadoop /var/lib/hadoop/ /var/lib/hadoop/cache/ /var/lib/hadoop/cache/hadoop/
21 | chmod 1777 /var/lib/hadoop/cache/
22 | update-alternatives --install /etc/hadoop/conf hadoop-conf /etc/hadoop/conf.empty 10
23 | ;;
24 |
25 | abort-upgrade|abort-remove|abort-deconfigure)
26 | ;;
27 |
28 | *)
29 | echo "postinst called with unknown argument \`$1'" >&2
30 | exit 1
31 | ;;
32 | esac
33 |
34 | #DEBHELPER#
35 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | addgroup --system hadoop >/dev/null 2>/dev/null || :
20 | if ! getent passwd hdfs >/dev/null; then
21 | # Adding system user: hdfs.
22 | adduser \
23 | --system \
24 | --group \
25 | --home /usr/lib/hadoop \
26 | --gecos "Hadoop HDFS" \
27 | --shell /bin/bash \
28 | hdfs >/dev/null 2>/dev/null || :
29 | usermod --append --groups hadoop hdfs || :
30 | fi
31 | if ! getent passwd mapred >/dev/null; then
32 | adduser \
33 | --system \
34 | --group \
35 | --home /usr/lib/hadoop \
36 | --gecos "Hadoop MapReduce" \
37 | --shell /bin/bash \
38 | mapred >/dev/null 2>/dev/null || :
39 | usermod --append --groups hadoop mapred || :
40 | fi
41 | ;;
42 |
43 | abort-upgrade)
44 | ;;
45 |
46 | *)
47 | echo "preinst called with unknown argument \`$1'" >&2
48 | exit 1
49 | ;;
50 | esac
51 |
52 | # dh_installdeb will replace this with shell code automatically
53 | # generated by other debhelper scripts.
54 |
55 | #DEBHELPER#
56 |
57 | exit 0
58 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/hadoop.prerm:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # prerm script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `upgrade'
11 | # * `failed-upgrade'
12 | # * `remove' `in-favour'
13 | # * `deconfigure' `in-favour'
14 | # `removing'
15 | #
16 | # for details, see http://www.debian.org/doc/debian-policy/ or
17 | # the debian-policy package
18 |
19 |
20 | case "$1" in
21 | remove|upgrade|deconfigure)
22 | update-alternatives --remove hadoop-conf /etc/hadoop/conf.empty
23 | ;;
24 |
25 | failed-upgrade)
26 | ;;
27 |
28 | *)
29 | echo "prerm called with unknown argument \`$1'" >&2
30 | exit 1
31 | ;;
32 | esac
33 |
34 | # dh_installdeb will replace this with shell code automatically
35 | # generated by other debhelper scripts.
36 |
37 | #DEBHELPER#
38 |
39 | exit 0
40 |
41 |
42 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/install_init_scripts.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | MAJOR_VERSION=${HADOOP_VERSION}
4 | SRC_PKG=hadoop
5 |
6 | namenode_user=hdfs
7 | secondarynamenode_user=hdfs
8 | datanode_user=hdfs
9 | jobtracker_user=mapred
10 | tasktracker_user=mapred
11 |
12 | for node in namenode secondarynamenode jobtracker tasktracker datanode ; do
13 | service_pkgdir=debian/$SRC_PKG-$node
14 | debdir=$service_pkgdir/DEBIAN
15 | template="debian/service-init.d.tpl"
16 | user=$(eval "echo \$${node}_user")
17 | mkdir -p $service_pkgdir/etc/init.d/ $debdir
18 | sed -e "s|@HADOOP_DAEMON@|$node|" -e "s|@HADOOP_MAJOR_VERSION@|$MAJOR_VERSION|" \
19 | -e "s|@DAEMON_USER@|$user|" \
20 | $template > $service_pkgdir/etc/init.d/$SRC_PKG-$node
21 | sed -e "s|@HADOOP_DAEMON@|$node|" -e "s|@HADOOP_MAJOR_VERSION@|$MAJOR_VERSION|" \
22 | -e "s|@DAEMON_USER@|$user|" \
23 | debian/service-postinst.tpl > $debdir/postinst
24 | sed -e "s|@HADOOP_DAEMON@|$node|" -e "s|@HADOOP_MAJOR_VERSION@|$MAJOR_VERSION|" \
25 | -e "s|@DAEMON_USER@|$user|" \
26 | debian/service-postrm.tpl > $debdir/postrm
27 | chmod 755 $service_pkgdir/etc/init.d/* $debdir/postinst $debdir/postrm
28 |
29 | # We aren't making packages for debian itself, so override ITP lintian warnings
30 | mkdir -p $service_pkgdir/usr/share/lintian/overrides
31 | echo "$SRC_PKG-$node: new-package-should-close-itp-bug" > $service_pkgdir/usr/share/lintian/overrides/$SRC_PKG-$node
32 |
33 | done
34 |
35 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/libhdfs-devel.dirs:
--------------------------------------------------------------------------------
1 | /usr/include
2 | /usr/share/doc/libhdfs-devel/examples
3 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/libhdfs-devel.install:
--------------------------------------------------------------------------------
1 | /usr/include/hdfs*
2 | /usr/lib/libhdfs.la
3 | /usr/lib/libhdfs.so
4 | /usr/share/doc/libhdfs-devel
5 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/libhdfs.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib/
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/libhdfs.install:
--------------------------------------------------------------------------------
1 | /usr/lib/libhdfs.so.*
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/service-postinst.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `configure'
10 | # * `abort-upgrade'
11 | # * `abort-remove' `in-favour'
12 | #
13 | # * `abort-remove'
14 | # * `abort-deconfigure' `in-favour'
15 | # `removing'
16 | #
17 | # for details, see http://www.debian.org/doc/debian-policy/ or
18 | # the debian-policy package
19 |
20 |
21 | case "$1" in
22 | configure)
23 | update-rc.d hadoop-@HADOOP_DAEMON@ defaults >/dev/null || exit 1
24 | ;;
25 |
26 | abort-upgrade|abort-remove|abort-deconfigure)
27 | ;;
28 |
29 | *)
30 | echo "postinst called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | exit 0
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/service-postrm.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postrm script for hadoop
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `purge'
11 | # * `upgrade'
12 | # * `failed-upgrade'
13 | # * `abort-install'
14 | # * `abort-install'
15 | # * `abort-upgrade'
16 | # * `disappear'
17 | #
18 | # for details, see http://www.debian.org/doc/debian-policy/ or
19 | # the debian-policy package
20 |
21 |
22 | case "$1" in
23 | purge)
24 | update-rc.d -f hadoop-@HADOOP_DAEMON@ remove > /dev/null || exit 1
25 | ;;
26 | remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
27 | ;;
28 |
29 | *)
30 | echo "postrm called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | exit 0
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/shlibs.local:
--------------------------------------------------------------------------------
1 | libjvm 1 sun-java6-bin (>= 6)
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hadoop/source.lintian-overrides:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/deb/hadoop/source.lintian-overrides
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/control:
--------------------------------------------------------------------------------
1 | Source: hadoop-hbase
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Alex Newman
5 | Build-Depends: debhelper (>= 6), sun-java6-jdk, git-core, adduser
6 | Standards-Version: 3.8.0
7 | Homepage: http://hadoop.apache.org/hbase/
8 |
9 | Package: hadoop-hbase
10 | Architecture: all
11 | Depends: adduser, sun-java6-jre, hadoop-zookeeper (>= 3.3.1), hadoop
12 | Recommends: ntp
13 | Description: HBase is the Hadoop database
14 | Use it when you need random, realtime read/write access to your Big Data.
15 | This project's goal is the hosting of very large tables -- billions of rows
16 | X millions of columns -- atop clusters of commodity hardware.
17 |
18 | Package: hadoop-hbase-doc
19 | Architecture: all
20 | Section: doc
21 | Description: Documentation for HBase
22 | This package contains the HBase manual and JavaDoc.
23 |
24 | Package: hadoop-hbase-master
25 | Architecture: all
26 | Depends: hadoop-hbase (= ${source:Version})
27 | Description: HMaster is the "master server" for HBase
28 | There is only one HMaster for a single HBase deployment.
29 |
30 | Package: hadoop-hbase-regionserver
31 | Architecture: all
32 | Depends: hadoop-hbase (= ${source:Version})
33 | Description: HRegionServer makes a set of HRegions available to clients
34 | It checks in with the HMaster. There are many HRegionServers in a single
35 | HBase deployment.
36 |
37 | Package: hadoop-hbase-thrift
38 | Architecture: all
39 | Depends: hadoop-hbase (= ${source:Version})
40 | Description: Provides an HBase Thrift service
41 | This package provides a Thrift service interface to the HBase distributed
42 | database.
43 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Alex Newman on
2 | Mon, 22 Feb 2010 23:07:14 -0800
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase-doc.dirs:
--------------------------------------------------------------------------------
1 | /usr/share/doc/hadoop-hbase-doc/
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase-doc.install:
--------------------------------------------------------------------------------
1 | /usr/share/doc/hadoop-hbase-doc
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.default:
--------------------------------------------------------------------------------
1 | export HADOOP_HOME=/usr/lib/hadoop
2 | export HBASE_HOME=/usr/lib/hbase
3 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.dirs:
--------------------------------------------------------------------------------
1 | /usr/bin
2 | /var/log/hbase
3 | /etc/default
4 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.install:
--------------------------------------------------------------------------------
1 | /usr/lib/hbase
2 | /usr/bin/hbase
3 | /etc/hbase
4 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.manpages:
--------------------------------------------------------------------------------
1 | debian/hbase.1
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hbase
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | install -d -m 0755 -o hbase -g hbase /var/log/hbase
9 | install -d -m 0755 -o hbase -g hbase /var/run/hbase
10 | rm -f /usr/lib/hbase/logs || :
11 | rm -f /usr/lib/hbase/pids || :
12 |
13 | chown hbase:hbase -R /var/run/hbase /var/log/hbase
14 | ln -s /var/log/hbase /usr/lib/hbase/logs
15 | ln -s /var/run/hbase /usr/lib/hbase/pids
16 |
17 | ;;
18 |
19 | abort-upgrade|abort-remove|abort-deconfigure)
20 | ;;
21 |
22 | *)
23 | echo "postinst called with unknown argument \`$1'" >&2
24 | exit 1
25 | ;;
26 | esac
27 |
28 | #DEBHELPER#
29 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for hbase
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | if ! getent passwd hbase >/dev/null; then
20 | # Adding system user: hbase.
21 | adduser \
22 | --system \
23 | --group \
24 | --home /var/lib/hbase \
25 | --gecos "HBase User" \
26 | --shell /bin/bash \
27 | hbase >/dev/null
28 | fi
29 | ;;
30 |
31 | abort-upgrade)
32 | ;;
33 |
34 | *)
35 | echo "preinst called with unknown argument \`$1'" >&2
36 | exit 1
37 | ;;
38 | esac
39 |
40 | # dh_installdeb will replace this with shell code automatically
41 | # generated by other debhelper scripts.
42 |
43 | #DEBHELPER#
44 |
45 | exit 0
46 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/hadoop-hbase.prerm:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # prerm script for hbase
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `upgrade'
11 | # * `failed-upgrade'
12 | # * `remove' `in-favour'
13 | # * `deconfigure' `in-favour'
14 | # `removing'
15 | #
16 | # for details, see http://www.debian.org/doc/debian-policy/ or
17 | # the debian-policy package
18 |
19 |
20 | case "$1" in
21 | remove|upgrade|deconfigure)
22 | unlink /usr/lib/hbase/logs
23 | ;;
24 |
25 | failed-upgrade)
26 | ;;
27 |
28 | *)
29 | echo "prerm called with unknown argument \`$1'" >&2
30 | exit 1
31 | ;;
32 | esac
33 |
34 | # dh_installdeb will replace this with shell code automatically
35 | # generated by other debhelper scripts.
36 |
37 | #DEBHELPER#
38 |
39 | exit 0
40 |
41 |
42 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/install_init_scripts.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | SRC_PKG=hadoop-hbase
4 | for node in master regionserver thrift ; do
5 | service_pkgdir=debian/$SRC_PKG-$node
6 | debdir=$service_pkgdir/DEBIAN
7 | template="debian/service-init.d.tpl"
8 |
9 | mkdir -p $service_pkgdir/etc/init.d/ $debdir
10 | sed -e "s|@HBASE_DAEMON@|$node|" $template > $service_pkgdir/etc/init.d/$SRC_PKG-$node
11 | sed -e "s|@HBASE_DAEMON@|$node|" debian/service-postinst.tpl > $debdir/postinst
12 | sed -e "s|@HBASE_DAEMON@|$node|" debian/service-postrm.tpl > $debdir/postrm
13 | echo /etc/init.d/$SRC_PKG-$node > $debdir/conffiles
14 | chmod 755 $debdir/postinst $debdir/postrm $service_pkgdir/etc/init.d/*
15 |
16 | mkdir -p $service_pkgdir/usr/share/lintian/overrides
17 | echo "$SRC_PKG-$node: new-package-should-close-itp-bug" > $service_pkgdir/usr/share/lintian/overrides/$SRC_PKG-$node
18 |
19 | done
20 |
21 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/service-postinst.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hbase
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `configure'
10 | # * `abort-upgrade'
11 | # * `abort-remove' `in-favour'
12 | #
13 | # * `abort-remove'
14 | # * `abort-deconfigure' `in-favour'
15 | # `removing'
16 | #
17 | # for details, see http://www.debian.org/doc/debian-policy/ or
18 | # the debian-policy package
19 |
20 |
21 | case "$1" in
22 | configure)
23 | update-rc.d hadoop-hbase-@HBASE_DAEMON@ defaults >/dev/null || exit 1
24 | ;;
25 |
26 | abort-upgrade|abort-remove|abort-deconfigure)
27 | ;;
28 |
29 | *)
30 | echo "postinst called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | exit 0
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/pkg/deb/hbase/service-postrm.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postrm script for hbase
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `purge'
11 | # * `upgrade'
12 | # * `failed-upgrade'
13 | # * `abort-install'
14 | # * `abort-install'
15 | # * `abort-upgrade'
16 | # * `disappear'
17 | #
18 | # for details, see http://www.debian.org/doc/debian-policy/ or
19 | # the debian-policy package
20 |
21 |
22 | case "$1" in
23 | purge)
24 | update-rc.d -f hadoop-hbase-@HBASE_DAEMON@ remove > /dev/null || exit 1
25 | ;;
26 | remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
27 | ;;
28 |
29 | *)
30 | echo "postrm called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | exit 0
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/changelog:
--------------------------------------------------------------------------------
1 | hive (0.3.0-0cloudera0.3.0) cloudera; urgency=low
2 |
3 | * New upstream
4 |
5 | -- Todd Lipcon Thu, 04 Jun 2009 15:42:42 -0700
6 |
7 | hive (0.3~svn759018-0cloudera0.3.0) cloudera; urgency=low
8 |
9 | * New upstream
10 |
11 | -- Mon, 06 Apr 2009 04:24:57 +0000
12 |
13 | hive (0~svn748058-0cloudera0.3.0) cloudera; urgency=low
14 |
15 | * Initial release
16 |
17 | -- Todd Lipcon Thu, 02 Apr 2009 23:36:13 -0400
18 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/compat:
--------------------------------------------------------------------------------
1 | 6
--------------------------------------------------------------------------------
/src/pkg/deb/hive/control:
--------------------------------------------------------------------------------
1 | Source: hadoop-hive
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Todd Lipcon
5 | Build-Depends: debhelper (>= 6), ant, sun-java6-jdk, python-support, git-core
6 | Standards-Version: 3.8.0
7 | Homepage: http://hadoop.apache.org/hive/
8 |
9 | Package: hadoop-hive
10 | Architecture: all
11 | Depends: adduser, sun-java6-jre, hadoop
12 | Description: A data warehouse infrastructure built on top of Hadoop
13 | Hive is a data warehouse infrastructure built on top of Hadoop that
14 | provides tools to enable easy data summarization, ad hoc querying and
15 | analysis of large datasets stored in Hadoop files. It provides a
16 | mechanism to put structure on this data and it also provides a simple
17 | query language called Hive QL which is based on SQL and which enables
18 | users familiar with SQL to query this data. At the same time, this
19 | language also allows traditional map/reduce programmers to be able to
20 | plug in their custom mappers and reducers to do more sophisticated
21 | analysis which may not be supported by the built-in capabilities of
22 | the language.
23 |
24 | Package: python-hive
25 | Architecture: all
26 | Section: python
27 | Depends: ${python:Depends}
28 | Provides: ${python:Provides}
29 | XS-Python-Version: >= 2.4
30 | Description: Python client library to talk to the Hive Metastore
31 | This is a generated Thrift client to talk to the Hive Metastore.
32 |
33 | Package: hadoop-hive-server
34 | Architecture: all
35 | Depends: hadoop-hive (= ${source:Version})
36 | Description: Provides a Hive Thrift service
37 | This optional package hosts a Thrift server for Hive clients across a network to use.
38 |
39 | Package: hadoop-hive-metastore
40 | Architecture: all
41 | Depends: hadoop-hive (= ${source:Version})
42 | Description: Shared metadata repository for Hive
43 | This optional package hosts a metadata server for Hive clients across a network to use.
44 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Todd Lipcon on
2 | Tue, 24 Mar 2009 00:33:33 -0400.
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/hadoop-hive.default:
--------------------------------------------------------------------------------
1 | # The port for Hive @HIVE_DAEMON@ daemon to listen to.
 2 | # Unfortunately, there is no way to specify the interfaces
3 | # to which the daemon binds.
4 | #
5 | # PORT=12345
6 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/hadoop-hive.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hive
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | # Install config alternatives
9 | update-alternatives --install /etc/hive/conf hive-conf /etc/hive/conf.dist 30
10 |
11 | # Set up directories on HDFS
12 | # TODO(todd) should this stuff be something like "hive create-warehouse"?
13 | su -s /bin/bash - ${hadoop_username} -c 'hadoop fs -mkdir /tmp' 1>/dev/null 2>/dev/null || :
14 | su -s /bin/bash - ${hadoop_username} -c 'hadoop fs -mkdir /user/hive/warehouse' 1>/dev/null 2>/dev/null || :
15 | su -s /bin/bash - ${hadoop_username} -c 'hadoop fs -chmod g+w /tmp' 1>/dev/null 2>/dev/null || :
16 | su -s /bin/bash - ${hadoop_username} -c 'hadoop fs -chmod g+w /user/hive/warehouse' 1>/dev/null 2>/dev/null || :
17 |
18 | # Ensure sticky bit on metastore dir - debian likes this to be done in postinst rather than the package
19 | chmod 1777 /var/lib/hive/metastore
20 |
21 | if [ "$2" ]; then
22 | old_metastore="/var/lib/hive/metastore/\${user.name}_db"
23 | new_metastore="/var/lib/hive/metastore/metastore_db"
24 | if [ -d $old_metastore ]; then
25 | mv $old_metastore $new_metastore || echo "Failed to automatically rename old metastore. Make sure to resolve this before running Hive."
26 | fi
27 | fi
28 | ;;
29 |
30 | abort-upgrade|abort-remove|abort-deconfigure)
31 | ;;
32 |
33 | *)
34 | echo "postinst called with unknown argument \`$1'" >&2
35 | exit 1
36 | ;;
37 | esac
38 |
39 | #DEBHELPER#
40 |
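Note (illustrative, not part of the packaging): the configure branch above needs a reachable HDFS; since every filesystem command is suffixed with "|| :", failures are silently ignored rather than aborting the install. A hypothetical post-install check, assuming the hadoop CLI is on the PATH and an HDFS superuser account named hdfs:

    su -s /bin/bash - hdfs -c 'hadoop fs -ls /user/hive'
    su -s /bin/bash - hdfs -c 'hadoop fs -ls /tmp'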
--------------------------------------------------------------------------------
/src/pkg/deb/hive/hadoop-hive.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for hive
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | case "$1" in
9 | install|upgrade)
10 | if ! getent passwd hive >/dev/null; then
11 | # Adding system user: hive.
12 | adduser \
13 | --system \
14 | --group \
15 | --home /var/lib/hive \
16 | --gecos "Hive User" \
17 | --shell /bin/false \
18 | hive >/dev/null
19 | fi
20 |
21 | install -d -m 0755 -o hive -g hive /var/log/hive
22 | install -d -m 0755 -o hive -g hive /var/run/hive
23 |
24 | ;;
25 |
26 | abort-upgrade)
27 | ;;
28 |
29 | *)
30 | echo "preinst called with unknown argument \`$1'" >&2
31 | exit 1
32 | ;;
33 | esac
34 |
35 | # dh_installdeb will replace this with shell code automatically
36 | # generated by other debhelper scripts.
37 |
38 | #DEBHELPER#
39 |
40 | exit 0
41 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/hadoop-hive.prerm:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # prerm script for hive
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | case "$1" in
9 | remove|upgrade|deconfigure)
10 | rmdir /var/log/hive 2>/dev/null || :
11 | rmdir /var/run/hive 2>/dev/null || :
12 | ;;
13 |
14 | failed-upgrade)
15 | ;;
16 |
17 | *)
18 | echo "prerm called with unknown argument \`$1'" >&2
19 | exit 1
20 | ;;
21 | esac
22 |
23 | # dh_installdeb will replace this with shell code automatically
24 | # generated by other debhelper scripts.
25 |
26 | #DEBHELPER#
27 |
28 | exit 0
29 |
30 |
31 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/install_init_scripts.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | SRC_PKG=hadoop-hive
4 | for node in server metastore ; do
5 | service_pkgdir=debian/$SRC_PKG-$node
6 | debdir=$service_pkgdir/DEBIAN
7 | template="debian/service-init.d.tpl"
8 |
9 | mkdir -p $service_pkgdir/etc/init.d/ $service_pkgdir/etc/default/ $debdir
10 | sed -e "s|@HIVE_DAEMON@|$node|" $template > $service_pkgdir/etc/init.d/$SRC_PKG-$node
11 | sed -e "s|@HIVE_DAEMON@|$node|" debian/hadoop-hive.default > $service_pkgdir/etc/default/$SRC_PKG-$node
12 | sed -e "s|@HIVE_DAEMON@|$node|" debian/service-postinst.tpl > $debdir/postinst
13 | sed -e "s|@HIVE_DAEMON@|$node|" debian/service-postrm.tpl > $debdir/postrm
14 | chmod 755 $debdir/postinst $debdir/postrm $service_pkgdir/etc/init.d/*
15 |
16 | mkdir -p $service_pkgdir/usr/share/lintian/overrides
17 | echo "$SRC_PKG-$node: new-package-should-close-itp-bug" > $service_pkgdir/usr/share/lintian/overrides/$SRC_PKG-$node
18 |
19 | done
20 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | #export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | clean:
11 | dh_testdir
12 | dh_testroot
13 | ant -f src/build.xml clean
14 | rm -f *-stamp
15 | rm -Rf debian/.ivy
16 | dh_clean
17 |
18 | build-indep: build-indep-stamp
19 | build-indep-stamp:
20 | mkdir -p debian/.ivy
21 | ant -f src/build.xml -Divy.home=`pwd`/debian/.ivy package
22 | touch $@
23 |
24 | install: install-indep
25 | install-indep:
26 | dh_testdir
27 | dh_testroot
28 | dh_clean -k -i
29 | dh_installdirs -i
30 | cp debian/hive-site.xml .
31 | cp debian/hive.1 .
32 | sh debian/install_hive.sh \
33 | --prefix=debian/hadoop-hive \
34 | --build-dir=src/build/dist \
35 | --doc-dir=debian/hadoop-hive/usr/share/doc/hive \
36 | --python-dir=debian/python-hive/usr/share/python-support/python-hive/hive
37 | dh_pysupport
38 | dh_install -i
39 | sh debian/install_init_scripts.sh
40 | ls -ld debian/hadoop-hive/var/lib/hive/metastore
41 | (dh_lintian) || /bin/true
42 |
43 | binary-common:
44 | dh_testdir
45 | dh_testroot
46 | dh_installchangelogs
47 | dh_installdocs
48 | # dh_installexamples
49 | # dh_installmenu
50 | # dh_installdebconf
51 | # dh_installlogrotate
52 | # dh_installemacsen
53 | # dh_installpam
54 | # dh_installmime
55 | # dh_python
56 | # dh_installinit
57 | # dh_installcron
58 | # dh_installinfo
59 | dh_installman
60 | dh_link
61 | dh_strip
62 | dh_compress
63 | dh_fixperms
64 | # dh_perl
65 | dh_makeshlibs
66 | dh_installdeb
67 | dh_shlibdeps
68 | dh_gencontrol
69 | dh_md5sums
70 | dh_builddeb
71 |
72 | binary-indep: build-indep install-indep
73 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
74 |
75 | binary-arch:
76 |
77 |
78 | binary: binary-indep
79 | .PHONY: build clean binary-indep binary install-indep binary-arch
80 |
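Note (illustrative, not part of the packaging): this rules file follows the classic debhelper 6 layout: build-indep compiles with ant, install-indep lays out the package trees via install_hive.sh and install_init_scripts.sh, and binary-common runs the dh_* helpers. A sketch of driving the targets by hand from the unpacked source tree, assuming the Build-Depends from debian/control are installed:

    fakeroot debian/rules clean
    fakeroot debian/rules binary   # binary -> binary-indep -> build-indep, install-indep, binary-common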
--------------------------------------------------------------------------------
/src/pkg/deb/hive/service-postinst.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for hive
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | case "$1" in
9 | configure)
10 | update-rc.d hadoop-hive-@HIVE_DAEMON@ defaults >/dev/null || exit 1
11 | ;;
12 |
13 | abort-upgrade|abort-remove|abort-deconfigure)
14 | ;;
15 |
16 | *)
17 | echo "postinst called with unknown argument \`$1'" >&2
18 | exit 1
19 | ;;
20 | esac
21 |
22 | exit 0
23 |
24 |
25 |
--------------------------------------------------------------------------------
/src/pkg/deb/hive/service-postrm.tpl:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postrm script for hive
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | case "$1" in
9 | purge)
10 | update-rc.d -f hadoop-hive-@HIVE_DAEMON@ remove > /dev/null || exit 1
11 | ;;
12 | upgrade)
13 | service hadoop-hive-@HIVE_DAEMON@ condrestart >/dev/null || :
14 | ;;
15 | remove|failed-upgrade|abort-install|abort-upgrade|disappear)
16 | ;;
17 |
18 | *)
19 | echo "postrm called with unknown argument \`$1'" >&2
20 | exit 1
21 | ;;
22 | esac
23 |
24 | exit 0
25 |
26 |
27 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Arvind Prabhakar on
2 | Thu, 17 Jun 2010 00:33:33 -0400.
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2010, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/oozie.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for oozie
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `configure'
10 | # * `abort-upgrade'
11 | # * `abort-remove' `in-favour'
12 | #
13 | # * `abort-remove'
14 | # * `abort-deconfigure' `in-favour'
15 | # `removing'
16 | #
17 | # for details, see http://www.debian.org/doc/debian-policy/ or
18 | # the debian-policy package
19 |
20 |
21 | case "$1" in
22 | configure)
23 | chown -R oozie:oozie /var/lib/oozie
24 | update-rc.d oozie defaults >/dev/null || exit 1
25 | ;;
26 |
27 | abort-upgrade|abort-remove|abort-deconfigure)
28 | ;;
29 |
30 | *)
31 | echo "postinst called with unknown argument \`$1'" >&2
32 | exit 1
33 | ;;
34 | esac
35 |
36 | exit 0
37 |
38 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/oozie.postrm:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postrm script for oozie
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `remove'
10 | # * `purge'
11 | # * `upgrade'
12 | # * `failed-upgrade'
13 | # * `abort-install'
14 | # * `abort-install'
15 | # * `abort-upgrade'
16 | # * `disappear'
17 | #
18 | # for details, see http://www.debian.org/doc/debian-policy/ or
19 | # the debian-policy package
20 |
21 |
22 | case "$1" in
23 | purge)
24 | rm -rf /etc/oozie
25 | rm -rf /var/lib/oozie
26 | rm -rf /var/run/oozie
27 | rm -rf /var/tmp/oozie
28 | rm -rf /var/log/oozie
29 | update-rc.d -f oozie remove > /dev/null || exit 1
30 | ;;
31 | remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
32 | ;;
33 |
34 | *)
35 | echo "postrm called with unknown argument \`$1'" >&2
36 | exit 1
37 | ;;
38 | esac
39 |
40 | exit 0
41 |
42 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/oozie.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for oozie
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | if ! getent passwd oozie >/dev/null; then
20 | # Adding system user: oozie.
21 | adduser \
22 | --system \
23 | --disabled-login \
24 | --group \
25 | --home /var/run/oozie \
26 | --gecos "Oozie User" \
27 | --shell /bin/false \
28 | oozie >/dev/null
29 | fi
30 | ;;
31 |
32 | abort-upgrade)
33 | ;;
34 |
35 | *)
36 | echo "preinst called with unknown argument \`$1'" >&2
37 | exit 1
38 | ;;
39 | esac
40 |
41 | # dh_installdeb will replace this with shell code automatically
42 | # generated by other debhelper scripts.
43 |
44 | #DEBHELPER#
45 |
46 | exit 0
47 |
--------------------------------------------------------------------------------
/src/pkg/deb/oozie/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | ifneq "$(wildcard ${HOME}/.m2/settings.xml)" ""
11 | m2_settings_flag=--settings ${HOME}/.m2/settings.xml
12 | else
13 | m2_settings_flag=
14 | endif
15 |
16 | patch: patch-stamp
17 | patch-stamp:
18 | touch $@
19 |
20 | clean:
21 | dh_testdir
22 | dh_testroot
23 | rm -f *-stamp
24 | dh_clean
25 | rm -Rf debian/tmp debian/oozie
26 | find debian -name .\*swp -exec rm -f {} \;
27 |
28 |
29 | # we need to set 'maven.repo.local' otherwise the build with
30 | # fakeroot fails because it tries to use /root/.m2
31 |
32 | build-indep: build-indep-stamp
33 | build-indep-stamp: patch-stamp
34 | # M2_CACHE=`mktemp -d /tmp/oozie.m2.XXXXX`
35 | mkdir -p distro/downloads
36 | (export DO_MAVEN_DEPLOY=""; export FULL_VERSION=2.3.1; sh -x debian/do-release-build -Dmaven.repo.local=${HOME}/.m2/repository ${m2_settings_flag} -DskipTests)
37 | # rm -rf ${M2_CACHE}
38 | mkdir -p debian/tmp
39 | tar cf - --exclude=debian/\* . | (cd debian/tmp && tar xf -)
40 | touch $@
41 |
42 | install: install-indep
43 | install-indep:
44 | dh_testdir
45 | dh_testroot
46 | sh -x debian/create-package-layout --extra-dir=debian/ --build-dir=. --server-dir=./debian/oozie --client-dir=./debian/oozie-client --docs-dir=./debian/oozie-client/usr/share/doc/oozie --initd-dir=./debian/oozie/etc/init.d
47 | dh_install -i
48 |
49 | binary-common:
50 | dh_testdir
51 | dh_testroot
52 | dh_installchangelogs
53 | dh_installdocs
54 | dh_installman
55 | dh_link
56 | dh_strip
57 | dh_compress
58 | dh_fixperms
59 | dh_makeshlibs
60 | dh_installinit
61 | dh_installdeb
62 | dh_shlibdeps
63 | dh_gencontrol
64 | dh_md5sums
65 | dh_builddeb
66 |
67 | binary-indep: build-indep install-indep
68 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
69 |
70 | binary-arch:
71 | binary: binary-indep
72 | .PHONY: build clean binary-indep binary install-indep binary-arch
73 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/control:
--------------------------------------------------------------------------------
1 | Source: hadoop-pig
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Todd Lipcon
5 | Build-Depends: debhelper (>= 6), ant, sun-java6-jdk, ant-optional, git-core
6 | Standards-Version: 3.8.0
7 | Homepage: http://hadoop.apache.org/pig/
8 |
9 | Package: hadoop-pig
10 | Architecture: all
11 | Depends: sun-java6-jre, hadoop
12 | Description: A platform for analyzing large data sets using Hadoop
13 | Pig is a platform for analyzing large data sets that consists of a high-level language
14 | for expressing data analysis programs, coupled with infrastructure for evaluating these
15 | programs. The salient property of Pig programs is that their structure is amenable
16 | to substantial parallelization, which in turn enables them to handle very large data sets.
17 | .
18 | At the present time, Pig's infrastructure layer consists of a compiler that produces
19 | sequences of Map-Reduce programs, for which large-scale parallel implementations already
20 | exist (e.g., the Hadoop subproject). Pig's language layer currently consists of a textual
21 | language called Pig Latin, which has the following key properties:
22 | .
23 | * Ease of programming
24 | It is trivial to achieve parallel execution of simple, "embarrassingly parallel" data
25 | analysis tasks. Complex tasks comprised of multiple interrelated data transformations
26 | are explicitly encoded as data flow sequences, making them easy to write, understand,
27 | and maintain.
28 | * Optimization opportunities
29 | The way in which tasks are encoded permits the system to optimize their execution
30 | automatically, allowing the user to focus on semantics rather than efficiency.
31 | * Extensibility
32 | Users can create their own functions to do special-purpose processing.
33 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Todd Lipcon on
2 | Tue, 24 Mar 2009 00:33:33 -0400.
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/hadoop-pig.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for pig
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | update-alternatives --install /etc/pig/conf pig-conf /etc/pig/conf.dist 30
9 | ;;
10 |
11 | abort-upgrade|abort-remove|abort-deconfigure)
12 | ;;
13 |
14 | *)
15 | echo "postinst called with unknown argument \`$1'" >&2
16 | exit 1
17 | ;;
18 | esac
19 |
20 | #DEBHELPER#
21 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/hadoop-pig.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for pig
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
 8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | # workaround for https://issues.cloudera.org/browse/DISTRO-223
20 | if [ "$2" -a -d /usr/lib/pig/conf ]; then
21 | mv /usr/lib/pig/conf /usr/lib/pig/conf.old.`date +'%s'` || :
22 | fi
23 | ;;
24 |
25 | abort-upgrade)
26 | ;;
27 |
28 | *)
29 | echo "preinst called with unknown argument \`$1'" >&2
30 | exit 1
31 | ;;
32 | esac
33 |
34 | # dh_installdeb will replace this with shell code automatically
35 | # generated by other debhelper scripts.
36 |
37 | #DEBHELPER#
38 |
39 | exit 0
40 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/pig.dirs:
--------------------------------------------------------------------------------
1 | /usr/lib/pig
2 | /usr/bin
3 |
--------------------------------------------------------------------------------
/src/pkg/deb/pig/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | #export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | patch: patch-stamp
11 | patch-stamp:
12 | touch $@
13 |
14 | clean:
15 | dh_testdir
16 | dh_testroot
17 | ant clean
18 | rm -f *-stamp
19 | dh_clean
20 | rm -Rf debian/tmp debian/hadoop-pig debian/pig
21 | find debian -name .\*swp -exec rm -f {} \;
22 |
23 | build-indep: build-indep-stamp
24 | build-indep-stamp: patch-stamp
25 | ant -Divy.home=`pwd`/debian/.ivy -Djava5.home=${JAVA5_HOME} -Dforrest.home=${FORREST_HOME} -Ddist.dir=debian/tmp package
26 | touch $@
27 |
28 | install: install-indep
29 | install-indep:
30 | dh_testdir
31 | dh_testroot
32 |
33 | cp debian/*.properties .
34 | cp debian/pig.1 .
35 | sh -x debian/install_pig.sh \
36 | --build-dir=debian/tmp \
37 | --prefix=debian/hadoop-pig \
38 | --doc-dir=debian/hadoop-pig/usr/share/doc/hadoop-pig \
39 | --examples-dir=debian/hadoop-pig/usr/share/doc/hadoop-pig/examples
40 |
41 | dh_install -i
42 | (dh_lintian) || /bin/true
43 |
44 | binary-common:
45 | dh_testdir
46 | dh_testroot
47 | dh_installchangelogs
48 | dh_installdocs
49 | # dh_installexamples
50 | # dh_installmenu
51 | # dh_installdebconf
52 | # dh_installlogrotate
53 | # dh_installemacsen
54 | # dh_installpam
55 | # dh_installmime
56 | # dh_python
57 | # dh_installinit
58 | # dh_installcron
59 | # dh_installinfo
60 | dh_installman
61 | dh_link
62 | dh_strip
63 | dh_compress
64 | dh_fixperms
65 | # dh_perl
66 | dh_makeshlibs
67 | dh_installdeb
68 | dh_shlibdeps
69 | dh_gencontrol
70 | dh_md5sums
71 | dh_builddeb
72 |
73 | binary-indep: build-indep install-indep
74 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
75 |
76 | binary-arch:
77 |
78 |
79 | binary: binary-indep
80 | .PHONY: build clean binary-indep binary install-indep binary-arch
81 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/control:
--------------------------------------------------------------------------------
1 | Source: sqoop
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Alex Newman
5 | Build-Depends: debhelper (>= 6), ant, sun-java6-jdk, ant-optional, git-core, sed, asciidoc, xmlto
6 | Standards-Version: 3.8.0
7 | Homepage: http://www.cloudera.com
8 |
9 | Package: sqoop
10 | Architecture: all
11 | Depends: sun-java6-jre, hadoop
12 | Description: Tool for easy imports and exports of data sets between databases and HDFS
13 | Sqoop is a tool that provides the ability to import and export data sets between
14 | the Hadoop Distributed File System (HDFS) and relational databases.
15 |
16 | Package: sqoop-metastore
17 | Architecture: all
18 | Depends: sqoop (= ${source:Version}), adduser
19 | Description: Shared metadata repository for Sqoop.
20 | This optional package hosts a metadata server for Sqoop clients across a network to use.
21 |
22 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Alex Newman on
2 | Mon, 22 Feb 2010 23:07:14 -0800
3 |
4 | The upstream package is from cloudera
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2010, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | patch: patch-stamp
11 | patch-stamp:
12 | touch $@
13 |
14 | clean:
15 | dh_testdir
16 | dh_testroot
17 | rm -f *-stamp
18 | dh_clean
19 | rm -Rf debian/tmp debian/sqoop
20 | find debian -name .\*swp -exec rm -f {} \;
21 |
22 | build-indep: build-indep-stamp
23 | build-indep-stamp: patch-stamp
24 | ant -Divy.home=`pwd`/debian/.ivy -f build.xml package
25 | touch $@
26 |
27 | install: install-indep
28 | install-indep:
29 | dh_testdir
30 | dh_testroot
31 | sh -x debian/install_sqoop.sh \
32 | --build-dir=. \
33 | --prefix=debian/sqoop
34 | dh_install -i
35 |
36 | binary-common:
37 | dh_testdir
38 | dh_testroot
39 | dh_installchangelogs
40 | dh_installdocs
41 | dh_installman
42 | dh_link
43 | dh_strip
44 | dh_compress
45 | dh_fixperms
46 | dh_makeshlibs
47 | dh_installinit
48 | dh_installdeb
49 | dh_shlibdeps
50 | dh_gencontrol
51 | dh_md5sums
52 | dh_builddeb
53 |
54 | binary-indep: build-indep install-indep
55 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
56 |
57 | binary-arch:
58 | binary: binary-indep
59 | .PHONY: build clean binary-indep binary install-indep binary-arch
60 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/sqoop-metastore.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for sqoop
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | ;;
9 |
10 | abort-upgrade|abort-remove|abort-deconfigure)
11 | ;;
12 |
13 | *)
14 | echo "postinst called with unknown argument \`$1'" >&2
15 | exit 1
16 | ;;
17 | esac
18 |
19 | #DEBHELPER#
20 |
--------------------------------------------------------------------------------
/src/pkg/deb/sqoop/sqoop-metastore.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for sqoop-metastore
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | if ! getent passwd sqoop >/dev/null; then
20 | # Adding system user: sqoop.
21 | adduser \
22 | --system \
23 | --group \
24 | --home /var/lib/sqoop \
25 | --gecos "Sqoop User" \
26 | --shell /bin/false \
27 | sqoop >/dev/null
28 | fi
29 | install -d -m 0755 -o sqoop -g sqoop /var/lib/sqoop
30 | install -d -m 0755 -o sqoop -g sqoop /var/log/sqoop
31 | ;;
32 |
33 | abort-upgrade)
34 | ;;
35 |
36 | *)
37 | echo "preinst called with unknown argument \`$1'" >&2
38 | exit 1
39 | ;;
40 | esac
41 |
42 | # dh_installdeb will replace this with shell code automatically
43 | # generated by other debhelper scripts.
44 |
45 | #DEBHELPER#
46 |
47 | exit 0
48 |
--------------------------------------------------------------------------------
/src/pkg/deb/whirr/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/whirr/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/whirr/control:
--------------------------------------------------------------------------------
1 | Source: whirr
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Tom White
5 | Build-Depends: debhelper (>= 6), sun-java6-jdk, git-core
6 | Standards-Version: 3.8.0
7 | Homepage: http://incubator.apache.org/whirr
8 |
9 | Package: whirr
10 | Architecture: all
11 | Depends: sun-java6-jre
12 | Description: Scripts and libraries for running software services on cloud infrastructure
13 | Whirr provides
14 | .
15 | * A cloud-neutral way to run services. You don't have to worry about the
16 | idiosyncrasies of each provider.
17 | * A common service API. The details of provisioning are particular to the
18 | service.
19 | * Smart defaults for services. You can get a properly configured system
20 | running quickly, while still being able to override settings as needed.
21 |
--------------------------------------------------------------------------------
/src/pkg/deb/whirr/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Tom White on
2 | Mon, 22 Feb 2010 23:07:14 -0800
3 |
4 | The upstream package is from cloudera
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2010, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
--------------------------------------------------------------------------------
/src/pkg/deb/whirr/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | patch: patch-stamp
11 | patch-stamp:
12 | touch $@
13 |
14 | clean:
15 | dh_testdir
16 | dh_testroot
17 | rm -f *-stamp
18 | dh_clean
19 | rm -Rf debian/tmp debian/whirr
20 | find debian -name .\*swp -exec rm -f {} \;
21 |
22 | build-indep: build-indep-stamp
23 | build-indep-stamp: patch-stamp
24 | # we'll just use the build from the tarball.
25 | mkdir -p debian/tmp
26 | # NOTE: FIXME me right away
27 | cp -r debian/whirr.1 .
28 | tar cf - --exclude=debian/\* . | (cd debian/tmp && tar xf -)
29 | touch $@
30 |
31 | install: install-indep
32 | install-indep:
33 | dh_testdir
34 | dh_testroot
35 | sh -x debian/install_whirr.sh \
36 | --build-dir=debian/tmp \
37 | --prefix=debian/whirr
38 | (dh_lintian) || /bin/true
39 |
40 | binary-common:
41 | dh_testdir
42 | dh_testroot
43 | dh_installchangelogs
44 | dh_installdocs
45 | # dh_installexamples
46 | # dh_installmenu
47 | # dh_installdebconf
48 | # dh_installlogrotate
49 | # dh_installemacsen
50 | # dh_installpam
51 | # dh_installmime
52 | # dh_python
53 | # dh_installinit
54 | # dh_installcron
55 | # dh_installinfo
56 | dh_installman
57 | dh_link
58 | dh_strip
59 | dh_compress
60 | dh_fixperms
61 | # dh_perl
62 | dh_makeshlibs
63 | dh_installdeb
64 | dh_shlibdeps
65 | dh_gencontrol
66 | dh_md5sums
67 | dh_builddeb
68 |
69 | binary-indep: build-indep install-indep
70 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
71 |
72 | binary-arch:
73 |
74 |
75 | binary: binary-indep
76 | .PHONY: build clean binary-indep binary install-indep binary-arch
77 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/changelog:
--------------------------------------------------------------------------------
1 | --- This is auto-generated
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/compat:
--------------------------------------------------------------------------------
1 | 6
2 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/control:
--------------------------------------------------------------------------------
1 | Source: hadoop-zookeeper
2 | Section: misc
3 | Priority: extra
4 | Maintainer: Alex Newman
5 | Build-Depends: debhelper (>= 6), ant, sun-java6-jdk, ant-optional, git-core, autoconf, automake, subversion
6 | Standards-Version: 3.8.0
7 | Homepage: http://hadoop.apache.org/zookeeper/
8 |
9 | Package: hadoop-zookeeper
10 | Architecture: all
11 | Depends: sun-java6-jre
12 | Conflicts: zookeeper
13 | Replaces: zookeeper
14 | Description: A high-performance coordination service for distributed applications.
15 | ZooKeeper is a centralized service for maintaining configuration information, naming, providing distributed synchronization, and providing group services. All of these kinds of services are used in some form or another by distributed applications. Each time they are implemented there is a lot of work that goes into fixing the bugs and race conditions that are inevitable. Because of the difficulty of implementing these kinds of services, applications usually skimp on them initially, which makes them brittle in the presence of change and difficult to manage. Even when done correctly, different implementations of these services lead to management complexity when the applications are deployed.
16 |
17 | Package: hadoop-zookeeper-server
18 | Architecture: all
19 | Depends: hadoop-zookeeper (= ${source:Version})
20 | Description: This runs the zookeeper server on startup.
21 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/copyright:
--------------------------------------------------------------------------------
1 | This package was debianized by Alex Newman on
2 | Mon, 22 Feb 2010 23:07:14 -0800
3 |
4 | The upstream package is from
5 | The Cloudera Distribution for Hadoop is from
6 |
7 | License:
8 | Apache 2.0 - see /usr/share/common-licenses/Apache-2.0
9 |
10 | This Debian Package contains the Cloudera Distribution for Hadoop.
11 | This distribution is Copyright 2009, Cloudera, Inc. and is licensed under
12 | the Apache 2.0 License as above. See http://cloudera.com/distribution
13 |
14 | This product includes software developed by The Apache Software
15 | Foundation (http://www.apache.org/).
16 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/hadoop-zookeeper.postinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # postinst script for zookeeper
3 |
4 | set -e
5 |
6 | case "$1" in
7 | configure)
8 | # Install config alternatives
9 | update-alternatives --install /etc/zookeeper/conf hadoop-zookeeper-conf /etc/zookeeper/conf.dist 30
10 | ;;
11 |
12 | abort-upgrade|abort-remove|abort-deconfigure)
13 | ;;
14 |
15 | *)
16 | echo "postinst called with unknown argument \`$1'" >&2
17 | exit 1
18 | ;;
19 | esac
20 |
21 | #DEBHELPER#
22 |
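Note (illustrative, not part of the packaging): because the configuration directory is managed through the alternatives system, the active link can be inspected or repointed after installation. A hypothetical session, where conf.my_cluster stands in for a locally managed configuration directory:

    update-alternatives --display hadoop-zookeeper-conf
    update-alternatives --install /etc/zookeeper/conf hadoop-zookeeper-conf /etc/zookeeper/conf.my_cluster 50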
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/hadoop-zookeeper.preinst:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # preinst script for zookeeper
3 | #
4 | # see: dh_installdeb(1)
5 |
6 | set -e
7 |
8 | # summary of how this script can be called:
9 | # * `install'
10 | # * `install'
11 | # * `upgrade'
12 | # * `abort-upgrade'
13 | # for details, see http://www.debian.org/doc/debian-policy/ or
14 | # the debian-policy package
15 |
16 |
17 | case "$1" in
18 | install|upgrade)
19 | if ! getent passwd zookeeper >/dev/null; then
20 | # Adding system user: zookeeper.
21 | adduser \
22 | --system \
23 | --group \
24 | --home /var/zookeeper \
25 | --gecos "ZooKeeper User" \
26 | --shell /bin/false \
27 | zookeeper >/dev/null
28 | fi
29 | install -d -m 0755 -o zookeeper -g zookeeper /var/log/zookeeper
30 | install -d -m 0755 -o zookeeper -g zookeeper /var/lib/zookeeper
31 | ;;
32 |
33 | abort-upgrade)
34 | ;;
35 |
36 | *)
37 | echo "preinst called with unknown argument \`$1'" >&2
38 | exit 1
39 | ;;
40 | esac
41 |
42 | # dh_installdeb will replace this with shell code automatically
43 | # generated by other debhelper scripts.
44 |
45 | #DEBHELPER#
46 |
47 | exit 0
48 |
--------------------------------------------------------------------------------
/src/pkg/deb/zookeeper/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 | # -*- makefile -*-
3 |
4 | # Uncomment this to turn on verbose mode.
5 | export DH_VERBOSE=1
6 |
7 | # This has to be exported to make some magic below work.
8 | export DH_OPTIONS
9 |
10 | patch: patch-stamp
11 | patch-stamp:
12 | touch $@
13 |
14 | clean:
15 | dh_testdir
16 | dh_testroot
17 | rm -f *-stamp
18 | dh_clean
19 | rm -Rf debian/tmp debian/hadoop-zookeeper debian/zookeeper
20 | find debian -name .\*swp -exec rm -f {} \;
21 |
22 | build-indep: build-indep-stamp
23 | build-indep-stamp: patch-stamp
24 | ant -f build.xml package -Divy.home=`pwd`/debian/.ivy
25 | mkdir -p debian/tmp
26 | tar cf - --exclude=debian/\* . | (cd debian/tmp && tar xf -)
27 | touch $@
28 |
29 | install: install-indep
30 | install-indep:
31 | dh_testdir
32 | dh_testroot
33 | cp debian/zookeeper.1 .
34 | sh -x debian/install_zookeeper.sh \
35 | --build-dir=. \
36 | --prefix=debian/hadoop-zookeeper
37 | dh_install -i
38 |
39 | binary-common:
40 | dh_testdir
41 | dh_testroot
42 | dh_installchangelogs
43 | dh_installdocs
44 | dh_installman
45 | dh_link
46 | dh_strip
47 | dh_compress
48 | dh_fixperms
49 | dh_makeshlibs
50 | dh_installinit
51 | dh_installdeb
52 | dh_shlibdeps
53 | dh_gencontrol
54 | dh_md5sums
55 | dh_builddeb
56 |
57 | binary-indep: build-indep install-indep
58 | $(MAKE) -f debian/rules DH_OPTIONS=-i binary-common
59 |
60 | binary-arch:
61 | binary: binary-indep
62 | .PHONY: build clean binary-indep binary install-indep binary-arch
63 |
--------------------------------------------------------------------------------
/src/pkg/rpm/flume/RPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/flume/RPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/flume/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/flume/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/hadoop/RPMS/.gitignore:
--------------------------------------------------------------------------------
1 | repodata
2 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hadoop/SPECS/.gitignore:
--------------------------------------------------------------------------------
1 | hadoop.spec
2 | pig.spec
3 | hive.spec
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hbase/RPMS/.gitignore:
--------------------------------------------------------------------------------
1 | repodata
2 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hbase/SOURCES/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/hbase/SOURCES/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/hbase/SOURCES/hbase.default:
--------------------------------------------------------------------------------
1 | export HBASE_PID_DIR="/var/run/hbase"
2 | export HBASE_LOG_DIR="/var/log/hbase"
3 | export HBASE_IDENT_STRING=hbase
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hbase/SPECS/.gitignore:
--------------------------------------------------------------------------------
1 | hadoop.spec
2 | pig.spec
3 | hive.spec
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hbase/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/hbase/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/hive/RPMS/.gitignore:
--------------------------------------------------------------------------------
1 | repodata
2 |
--------------------------------------------------------------------------------
/src/pkg/rpm/hive/SPECS/.gitignore:
--------------------------------------------------------------------------------
1 | hadoop.spec
2 | pig.spec
3 | hive.spec
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/oozie/RPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/oozie/RPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/oozie/SOURCES/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/oozie/SOURCES/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/oozie/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/oozie/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/pig/RPMS/.gitignore:
--------------------------------------------------------------------------------
1 | repodata
2 |
--------------------------------------------------------------------------------
/src/pkg/rpm/pig/SPECS/.gitignore:
--------------------------------------------------------------------------------
1 | hadoop.spec
2 | pig.spec
3 | hive.spec
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/sqoop/BUILD/.gitignore:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/pkg/rpm/sqoop/RPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/sqoop/RPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/sqoop/SOURCES/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/sqoop/SOURCES/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/sqoop/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/sqoop/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/whirr/BUILD/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/whirr/BUILD/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/whirr/RPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/whirr/RPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/whirr/SOURCES/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/whirr/SOURCES/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/whirr/SPECS/whirr.spec:
--------------------------------------------------------------------------------
1 | %define lib_whirr /usr/lib/whirr
2 | %define man_dir /usr/share/man
3 |
4 | # disable repacking jars
5 | %define __os_install_post %{nil}
6 |
7 | Name: whirr
8 | Version: %{whirr_version}
9 | Release: %{whirr_release}
10 | Summary: Scripts and libraries for running software services on cloud infrastructure.
11 | URL: http://incubator.apache.org/whirr
12 | Group: Development/Libraries
13 | BuildArch: noarch
14 | Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
15 | License: ASL 2.0
16 | Source0: %{name}-%{whirr_base_version}-incubating-src.tar.gz
17 | Source1: install_%{name}.sh
18 | Source2: whirr.1
19 |
20 | # RHEL6 natively provides Java
21 | %if 0%{?rhel} == 6
22 | BuildRequires: java-1.6.0-sun-devel
23 | Requires: java-1.6.0-sun
24 | %else
25 | BuildRequires: jdk >= 1.6
26 | Requires: jre >= 1.6
27 | %endif
28 |
29 |
30 | %description
31 | Whirr provides
32 |
33 | * A cloud-neutral way to run services. You don't have to worry about the
34 | idiosyncrasies of each provider.
35 | * A common service API. The details of provisioning are particular to the
36 | service.
37 | * Smart defaults for services. You can get a properly configured system
38 | running quickly, while still being able to override settings as needed.
39 |
40 |
41 | %prep
42 | %setup -n %{name}-%{whirr_base_version}-incubating
43 |
44 | %build
45 |
46 | mvn clean source:jar install assembly:assembly -Pjavadoc site
47 |
48 | %install
49 | %__rm -rf $RPM_BUILD_ROOT
50 | cp $RPM_SOURCE_DIR/whirr.1 .
51 | sh $RPM_SOURCE_DIR/install_whirr.sh \
52 | --build-dir=. \
53 | --prefix=$RPM_BUILD_ROOT
54 |
55 | %files
56 | %defattr(-,root,root)
57 | %attr(0755,root,root) %{lib_whirr}
58 | %attr(0755,root,root) %{_bindir}/%{name}
59 | %attr(0644,root,root) %{man_dir}/man1/whirr.1.gz
60 |
61 |
--------------------------------------------------------------------------------
/src/pkg/rpm/whirr/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/whirr/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/zookeeper/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/zookeeper/.gitignore
--------------------------------------------------------------------------------
/src/pkg/rpm/zookeeper/RPMS/.gitignore:
--------------------------------------------------------------------------------
1 | repodata
2 |
--------------------------------------------------------------------------------
/src/pkg/rpm/zookeeper/SPECS/.gitignore:
--------------------------------------------------------------------------------
1 | hadoop.spec
2 | pig.spec
3 | hive.spec
4 |
--------------------------------------------------------------------------------
/src/pkg/rpm/zookeeper/SRPMS/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/src/pkg/rpm/zookeeper/SRPMS/.gitignore
--------------------------------------------------------------------------------
/src/repos/distributions:
--------------------------------------------------------------------------------
1 | Origin: Bigtop
2 | Label: Bigtop
3 | Suite: stable
4 | Codename: bigtop
5 | Version: 0.1
6 | Architectures: i386 amd64 source
7 | Components: contrib
8 | Description: Bigtop
9 |
--------------------------------------------------------------------------------
/test/MANIFEST.txt:
--------------------------------------------------------------------------------
1 | ASF License 2.0
2 | org.codehaus.groovy.maven.runtime:gmaven-runtime-1.6:jar:1.0 GMaven
3 | commons-logging:commons-logging:jar:1.1 ASF
4 | org.apache.ant:ant-junit:jar:1.8.2 ASF
5 | org.apache.pig:pigsmoke:jar:0.8.0-SNAPSHOT ASF
6 | org.apache.hadoop:hadoop-core:jar:0.20.2-cdh3u0 ASF
7 |
8 | Public Code License 1.0
9 | junit:junit:jar:4.8.1 JUnit
10 |
11 | Developed by Cloudera
12 | com.cloudera.sqoop:sqoop:jar:1.2.0-cdh3u0
13 | com.cloudera.itest:smoke-tests-conf:jar:1.1-SNAPSHOT
14 | com.cloudera.itest:itest-common:jar:1.1-SNAPSHOT
15 | com.cloudera.itest:hivesmoke:test-jar:tests:0.7.0-cdh3u1-SNAPSHOT
16 | com.cloudera.itest:smoke-tests-conf:jar:1.1-SNAPSHOT
17 | com.cloudera.itest:itest-common:jar:1.1-SNAPSHOT
18 | com.cloudera.itest:hadoopsmoke:test-jar:tests:0.20.2-cdh3u1-SNAPSHOT
19 | com.cloudera.itest:ooziesmoke:test-jar:tests:2.3.0-cdh3u1-SNAPSHOT
20 | com.cloudera.itest:flumesmoke:test-jar:tests:0.9.3-cdh3u1-SNAPSHOT
21 | com.cloudera.itest:hbasesmoke:test-jar:tests:0.90.1-cdh3u1-SNAPSHOT
22 |
--------------------------------------------------------------------------------
/test/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
2 |
--------------------------------------------------------------------------------
/test/site/src/site/apt/devguide.apt:
--------------------------------------------------------------------------------
1 | How to contribute to iTest
2 | ~~~~~~~~~~~~~~~~~~
3 |
4 | The project is licensed under the Apache License, Version 2.0 (ASF).
5 |
6 | At the moment it is hosted on the github.com social coding network.
7 |
8 | If you find this project interesting and want to contribute your time, ideas,
9 | code, documentation writing skills, testing cycles - you are very welcome!
10 |
11 | Contributing is easy! Simply go to the {{{http://github.com/cloudera/iTest}project git repository}},
12 | fork it and go ahead! GitHub provides a nice collaboration feature called
13 | 'pull request' where one can easily offer new code to be included into the
14 | mainstream code base upon successful code review and discussion.
15 |
16 | If you find a problem with iTest, or have a feature request, you can
17 | {{{https://github.com/cloudera/iTest/issues}open a new issue}}.
18 |
19 | So, jump on and code with us! It is fun!
20 |
21 |
--------------------------------------------------------------------------------
/test/site/src/site/apt/downloads.apt:
--------------------------------------------------------------------------------
1 | -----
2 | download iTest
3 | -----
4 |
5 | Getting iTest
6 | ~~~~~~~~~~~~~~
7 |
8 | * Download {binary} artifact.
9 |
10 | iTest can be obtained in the form of a binary Maven artifact from the Cloudera
11 | Maven repository. The repository is located at repository.cloudera.com.
12 |
13 | All you need to do is add the following dependency to your project POM:
14 |
15 | +--------------------------------------+
16 | <dependency>
17 |   <groupId>com.cloudera.itest</groupId>
18 |   <artifactId>itest-common</artifactId>
19 |   <version>1.0-SNAPSHOT</version>
20 | </dependency>
21 | +--------------------------------------+
22 |
23 | * Download {sources}
24 |
25 | You can simply get a source artifact from the same Maven repository:
26 |
27 | +--------------------------------------+
28 | <dependency>
29 |   <groupId>com.cloudera.itest</groupId>
30 |   <artifactId>itest-common</artifactId>
31 |   <version>1.0-SNAPSHOT</version>
32 |   <classifier>sources</classifier>
33 | </dependency>
34 | +--------------------------------------+
35 |
36 | Or you can clone the iTest workspace from github.com/cloudera/iTest using {{{http://git-scm.com/}git}}.
37 |
38 |
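For quick experiments outside of a Maven build, the same artifact can also be
pulled from a plain Groovy script via Grape. This is only a hypothetical sketch:
the resolver root below is an assumption and would need to point at the actual
repository.cloudera.com repository URL for your setup.

  // Hypothetical Grape-based alternative to declaring the dependency in a POM.
  @GrabResolver(name = 'cloudera', root = 'https://repository.cloudera.com/')  // URL path is an assumption
  @Grab('com.cloudera.itest:itest-common:1.0-SNAPSHOT')
  import com.cloudera.itest.shell.Shell

  // If the artifact resolves, the iTest classes are usable right away.
  new Shell("/bin/bash -s").exec("echo itest-common resolved")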
--------------------------------------------------------------------------------
/test/site/src/site/apt/index.apt:
--------------------------------------------------------------------------------
1 | -----
2 | iTest site
3 | -----
4 | iTest Team
5 | -----
6 |
7 | Welcome to iTest project
8 | ~~~~~
9 |
10 | The purpose of the project is to ease the development of stack validation
11 | functionality and hide certain aspects of the stack from development and quality
12 | engineers alike, letting them focus on what they do best:
13 | developing and testing software.
14 |
15 | iTest introduces a necessary decoupling (along with proper versioning and
16 | dependency tracing of test artifacts) between a software product or component and
17 | the test artifacts which are developed in order to test certain aspects of its
18 | functionality.
19 |
20 | As of version 1.0, iTest provides the means and helper functionality to bring
21 | software stack testing to the next level, where test functionality is defined
22 | in the form of self-contained artifacts (including behavior and data, or
23 | declared dependencies on versioned data).
24 |
25 |
--------------------------------------------------------------------------------
/test/site/src/site/apt/itest.apt:
--------------------------------------------------------------------------------
1 | -----
2 | iTest details
3 | -----
4 |
5 | iTest project details
6 |
7 | * iTest application scope
8 |
9 | iTest doesn't have a narrowly defined scope of application. However, it is
10 | reasonable to use it for anything from integration testing on up the chain:
11 |
12 | * integration
13 |
14 | * system
15 |
16 | * load
17 |
18 | * reliability
19 |
20 | * benchmarking
21 |
22 | * iTest architecture
23 |
24 | The framework provides the following components:
25 |
26 | * shell support
27 |
28 | This module provides easy-to-use primitives to quickly execute a shelled-out
29 | command or statement. The return code of the execution, as well as the content
30 | of standard output/error, is available through properties (see the sketch at the end of this page).
31 |
32 | * package management support
33 |
34 | This module defines an abstract set of commands to work with native Linux
35 | packages (deb, rpm, etc.) and some concrete implementations. A user can
36 | write test code which manipulates native packages - install, remove,
37 | update, etc. - and which will work on any of the supported platforms.
38 |
39 | * jar utils
40 |
41 | The module implements helper functionality to ease manipulation of
42 | jar files (such as Maven artifacts).
43 |
44 | * iTest technology
45 |
46 | iTest is written in {{{http://groovy.codehaus.org}Groovy}}
47 |
48 | iTest facilitates certain operations for Maven-based projects. However, it isn't
49 | specific to Maven and can be used as a standalone framework.
50 |
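To make the shell support description above concrete, here is a minimal sketch of
how the module is exercised; it mirrors the usage found in ShellTest.groovy later
in this dump, and the command strings themselves are made-up examples:

  import com.cloudera.itest.shell.Shell

  // Run a statement through bash and inspect the results.
  Shell sh = new Shell("/bin/bash -s")
  sh.exec("ls /var/log | wc -l")   // any shell command or statement (example only)
  assert sh.ret == 0               // return code of the execution
  println sh.out                   // standard output, split into lines
  println sh.err                   // standard error, split into lines

  // The same shell can also run commands as another user (e.g. root).
  sh.setUser("root")
  sh.exec("id -u")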
--------------------------------------------------------------------------------
/test/site/src/site/resources/images/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/site/src/site/resources/images/banner.png
--------------------------------------------------------------------------------
/test/site/src/site/resources/images/itest.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/site/src/site/resources/images/itest.png
--------------------------------------------------------------------------------
/test/src/integration/sqoop/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
5 |   <modelVersion>4.0.0</modelVersion>
6 |
7 |   <parent>
8 |     <groupId>com.cloudera.itest</groupId>
9 |     <artifactId>cdh-smokes</artifactId>
10 |     <version>1.0-cdh3u1-SNAPSHOT</version>
11 |   </parent>
12 |   <groupId>com.cloudera.itest</groupId>
13 |   <artifactId>sqoop-integration</artifactId>
14 |   <version>1.2.0-cdh3u1-SNAPSHOT</version>
15 |   <name>sqoop-integration</name>
16 | </project>
17 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/create-table.hxt:
--------------------------------------------------------------------------------
1 | create 'test_table', 'data'
2 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/drop-table.hxt:
--------------------------------------------------------------------------------
1 | disable 'test_table'
2 | drop 'test_table'
3 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/expected-hbase-output.txt:
--------------------------------------------------------------------------------
1 | 10 value=ten
2 | 11 value=eleven
3 | 12 value=twelve
4 | 1 value=one
5 | 2 value=two
6 | 3 value=three
7 | 4 value=four
8 | 5 value=five
9 | 6 value=six
10 | 7 value=seven
11 | 8 value=eight
12 | 9 value=nine
13 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/mysql-create-db.sql:
--------------------------------------------------------------------------------
1 | #
2 | # Run this script once as root user before the test run. For example:
3 | # mysql -u root -p < /path/to/this/script.sql
4 | #
5 |
6 | #
7 | # Drop old databases
8 | #
9 | drop database if exists testhbase;
10 |
11 | #
12 | # Create new database
13 | #
14 | create database testhbase;
15 |
16 | #
17 | # Grant permissions to the testhbaseuser
18 | #
19 | use mysql;
20 | grant all privileges on testhbase.* to 'testhbaseuser'@'localhost';
21 | grant all privileges on testhbase.* to 'testhbaseuser'@'%';
22 | grant all privileges on testhbase.* to 'root'@'%';
23 | flush privileges;
24 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/mysql-load-db.sql:
--------------------------------------------------------------------------------
1 | #
2 | # Run this script as testhbaseuser, specifically for the testhbase database.
3 | # This script must be run after the mysql-create-db.sql has been run as root.
4 | # Example of command to run this script:
5 | # mysql testhbase -u testhbaseuser < /path/to/this/script.sql
6 | #
7 |
8 | #
9 | # Drop test_table
10 | #
11 | drop table if exists test_table;
12 |
13 | #
14 | # Create test_table
15 | #
16 | create table test_table (a integer primary key, b varchar(32));
17 |
18 | #
19 | # Load table data
20 | #
21 | insert into test_table values (1, 'one'), (2, 'two'), (3, 'three'), (4, 'four'),
22 | (5, 'five'), (6, 'six'), (7, 'seven'), (8, 'eight'), (9, 'nine'), (10, 'ten'),
23 | (11, 'eleven'), (12, 'twelve');
24 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hbase-sqoop/select-table.hxt:
--------------------------------------------------------------------------------
1 | scan 'test_table'
2 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hive-sqoop/expected-hive-output.txt:
--------------------------------------------------------------------------------
1 | 1 one
2 | 2 two
3 | 3 three
4 | 4 four
5 | 5 five
6 | 6 six
7 | 7 seven
8 | 8 eight
9 | 9 nine
10 | 10 ten
11 | 11 eleven
12 | 12 twelve
13 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hive-sqoop/hive-drop-table.hql:
--------------------------------------------------------------------------------
1 | drop table if exists test_table
2 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hive-sqoop/hive-select-table.hql:
--------------------------------------------------------------------------------
1 | select * from test_table order by a
2 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hive-sqoop/mysql-create-db.sql:
--------------------------------------------------------------------------------
1 | #
2 | # Run this script once as root user before the test run. For example:
3 | # mysql -u root -p < /path/to/this/script.sql
4 | #
5 |
6 | #
7 | # Drop old databases
8 | #
9 | drop database if exists testhive;
10 |
11 | #
12 | # Create new database
13 | #
14 | create database testhive;
15 |
16 | #
17 | # Grant permissions to the testhiveuser
18 | #
19 | use mysql;
20 | grant all privileges on testhive.* to 'testhiveuser'@'localhost';
21 | grant all privileges on testhive.* to 'testhiveuser'@'%';
22 | grant all privileges on testhive.* to 'root'@'%';
23 | flush privileges;
24 |
--------------------------------------------------------------------------------
/test/src/integration/sqoop/src/test/resources/hive-sqoop/mysql-load-db.sql:
--------------------------------------------------------------------------------
1 | #
2 | # Run this script as testhiveuser, specifically for the testhive database.
3 | # This script must be run after the mysql-create-db.sql has been run as root.
4 | # Example of command to run this script:
5 | # mysql testhive -u testhiveuser < /path/to/this/script.sql
6 | #
7 |
8 | #
9 | # Drop test_table
10 | #
11 | drop table if exists test_table;
12 |
13 | #
14 | # Create test_table
15 | #
16 | create table test_table (a integer primary key, b varchar(32));
17 |
18 | #
19 | # Load table data
20 | #
21 | insert into test_table values (1, 'one'), (2, 'two'), (3, 'three'), (4, 'four'),
22 | (5, 'five'), (6, 'six'), (7, 'seven'), (8, 'eight'), (9, 'nine'), (10, 'ten'),
23 | (11, 'eleven'), (12, 'twelve');
24 |
--------------------------------------------------------------------------------
/test/src/itest-common/README:
--------------------------------------------------------------------------------
1 | This project will develop a set of JarRunner adapters to perform the following
2 | tasks via specific adapters:
3 | - provide a list of files within a jar file (artifact) which can be passed to a test
4 | framework such as JUnit for further execution (generic adapter)
5 | - request a list of tests from an artifact to be executed by an external
6 | framework (specific adapter)
7 | - get a list of tests within an artifact, supply them with certain arguments
8 | and run them as POJO applications (concrete adapter: an extension of the generic one)
9 |
10 | This project contains the experimental foundation of JarRunner and a
11 | meta-service utilizing it to perform test execution. These two components
12 | might be joined together later.
13 |
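The JarRunner adapters themselves are not shown in this README, but the idea behind
the generic adapter - enumerate candidate test classes inside a jar artifact and hand
the list to a framework such as JUnit - can be sketched with nothing more than the
JDK. The jar path below is a placeholder:

  import java.util.jar.JarFile

  // Collect fully-qualified class names of the entries in a jar artifact.
  def jar = new JarFile("/tmp/some-test-artifact.jar")   // placeholder path
  def candidates = jar.entries().toList()
      .findAll { it.name.endsWith(".class") && !it.name.contains('$') }
      .collect { it.name.replace('/', '.').replaceAll('\\.class$', '') }
  candidates.each { println it }   // this list could be fed to JUnit for execution
  jar.close()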
--------------------------------------------------------------------------------
/test/src/itest-common/src/main/groovy/com/cloudera/itest/TestListUtils.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest
16 |
17 | public class TestListUtils {
18 |
19 | /**
20 | * Touching files to force Surefire plugin to pick them up
21 | * @param pathName
22 | * @throws IOException
23 | */
24 | static final def FS = System.getProperty('file.separator', '/');
25 |
26 | static void touchTestFiles(String dirPrefix, String pathName) throws IOException {
27 | if (!pathName.endsWith('.class')) {
28 | return;
29 | }
30 |
31 | List pathArray = pathName.split(FS).toList();
32 | def prefix = "";
33 | if (dirPrefix != null)
34 | prefix = dirPrefix;
35 |
36 | String fileName =
37 | pathArray.remove(pathArray.size() - 1).replaceAll('.class', '.touched')
38 | String dirName = prefix + FS + pathArray.join(FS)
39 |
40 | File dir = new File(dirName);
41 | dir.mkdirs();
42 | File file = new File(dirName, fileName);
43 | file.createNewFile();
44 |
45 | assert file.exists();
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/main/groovy/com/cloudera/itest/pmanager/ManagedPackage.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.pmanager
16 |
17 | import com.cloudera.itest.posix.Service
18 |
19 | abstract class ManagedPackage extends PackageInstance {
20 | public boolean isInstalled() {
21 | return mgr.isInstalled(this);
22 | }
23 |
24 | public int install() {
25 | return mgr.install(this);
26 | }
27 |
28 | public int remove() {
29 | return mgr.remove(this);
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/main/groovy/com/cloudera/itest/shell/OS.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.shell
16 |
17 | /**
18 | * This class provides various constants describing an odd collection
19 | * of facts about the OS that we're running on. It was inspired by
20 | * Puppet's Facter and perhaps should be renamed sometime in the future ;-)
21 | */
22 |
23 | class OS {
24 | public static boolean isLinux;
25 | public static String linux_flavor = "vanilla";
26 | public static String linux_codename = "plan9";
27 | public static String linux_release = "1.0";
28 |
29 | static {
30 | isLinux = (System.getProperty('os.name') =~ /(?i)linux/).matches();
31 |
32 | if (isLinux) {
33 | linux_flavor = "lsb_release -i -s".execute().text.trim();
34 | linux_codename = "lsb_release -c -s".execute().text.trim();
35 | linux_release = "lsb_release -r -s".execute().text.trim();
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/DummyTestError.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest
16 |
17 | import org.junit.Test
18 |
19 | public class DummyTestError {
20 | @Test
21 | void testAlwaysPass() {
22 | throw new IOException()
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/DummyTestFail.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest
16 |
17 | import static org.junit.Assert.*
18 | import org.junit.Test
19 |
20 | public class DummyTestFail {
21 | @Test
22 | void testAlwaysPass() {
23 | assertTrue('Constant failure', false)
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/DummyTestPass.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest
16 |
17 | import org.junit.Test
18 |
19 | public class DummyTestPass {
20 | @Test
21 | void testAlwaysPass() {
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/TestListUtilsTest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest
16 |
17 | import org.junit.Test
18 | import static org.junit.Assert.assertTrue
19 |
20 | public class TestListUtilsTest {
21 | @Test
22 | void testListUtils() {
23 | def prefix = 'tmp';
24 | def fileName = 'dir/under/which/file/created';
25 | File expectedFile = new File("${prefix}/${fileName}.touched");
26 |
27 | TestListUtils.touchTestFiles(prefix, "${fileName}.class");
28 | assertTrue("${fileName}.touched is missing", expectedFile.exists());
29 | expectedFile.delete();
30 |
31 | TestListUtils.touchTestFiles(prefix, "${fileName}.xml");
32 | assertTrue("only .class files are expected to be created",
33 | expectedFile.getParentFile().listFiles().size() == 0);
34 |
35 | File p = new File(prefix);
36 | p.deleteDir();
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/junit/OrderedParameterizedTest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.junit
16 |
17 | import org.junit.runner.RunWith
18 | import org.junit.Test
19 | import com.cloudera.itest.junit.OrderedParameterized.RunStage
20 | import org.junit.runners.Parameterized.Parameters
21 | import org.junit.AfterClass
22 | import static org.junit.Assert.assertEquals
23 |
24 | @RunWith(OrderedParameterized)
25 | class OrderedParameterizedTest {
26 | int parameter;
27 | static List order = [];
28 |
29 | @RunStage(level=1)
30 | @Test
31 | public void lateTest() {
32 | order.add(1);
33 | }
34 |
35 | @RunStage(level=-1)
36 | @Test
37 | public void earlyTest() {
38 | order.add(-1);
39 | }
40 |
41 | @Test
42 | public void defaultTest() {
43 | order.add(0);
44 | }
45 |
46 | OrderedParameterizedTest(int p) {
47 | parameter = p;
48 | }
49 |
50 | @Parameters
51 | public static Map generateTests() {
52 | HashMap res = new HashMap();
53 | res.put("test name", [1] as Object[]);
54 | return res;
55 | }
56 |
57 | @AfterClass
58 | static void verifyOrder() {
59 | assertEquals("tests were NOT executed in the desired order",
60 | [-1, 0, 1], order);
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/posix/AlternativeTest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.posix
16 |
17 | import org.junit.Test
18 | import static org.junit.Assert.assertTrue
19 |
20 | class AlternativeTest {
21 |
22 | @Test
23 | void testGetAllAlternatives() {
24 | Map groups = Alternative.getAlternatives();
25 | assertTrue("not a single alternative group found. weird.",
26 | groups.size() >0);
27 | assertTrue("there is no alternative for editor. weird.",
28 | groups["editor"] != null);
29 | assertTrue("in the editor alternative there are no actual alternatives",
30 | groups["editor"].getAlts().size() > 0);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/posix/ServiceTest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.posix
16 |
17 | import org.junit.Test
18 | import static org.junit.Assert.assertTrue
19 | import static org.junit.Assert.assertEquals
20 |
21 | class ServiceTest {
22 | private final String name = "rc.local";
23 | Service svc = new Service(name);
24 |
25 | @Test
26 | void testStatus() {
27 | println 'Status ' + svc.status()
28 | assertTrue("Expected a not-null and non-empty string as an ssh service status", svc.status() != null && svc.status() != "")
29 | assertEquals("wrong service name", name, svc.getName());
30 | }
31 |
32 | @Test
33 | void testRunLevels() {
34 | List l = svc.getRunLevels();
35 | assertTrue("Expected a non-zero size list of registered run levels for ssh service",
36 | 0 != l.size());
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/posix/UGITest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.posix
16 |
17 | import org.junit.Test
18 | import static org.junit.Assert.assertEquals
19 |
20 | class UGITest {
21 | UGI ugi = new UGI();
22 |
23 | @Test
24 | void testUsers() {
25 | assertEquals("expect root uid to be 0",
26 | "0", ugi.getUsers()["root"]["uid"]);
27 | }
28 |
29 | @Test
30 | void testGroups() {
31 | assertEquals("expect root gid to be 0",
32 | "0", ugi.getGroups()["root"]["gid"]);
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/test/src/itest-common/src/test/groovy/com/cloudera/itest/shell/ShellTest.groovy:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2011, Cloudera, Inc. All Rights Reserved.
3 | *
4 | * Cloudera, Inc. licenses this file to you under the Apache License,
5 | * Version 2.0 (the "License"). You may not use this file except in
6 | * compliance with the License. You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
11 | * CONDITIONS OF ANY KIND, either express or implied. See the License for
12 | * the specific language governing permissions and limitations under the
13 | * License.
14 | */
15 | package com.cloudera.itest.shell
16 |
17 | import org.junit.Test
18 |
19 | import static org.junit.Assert.assertEquals
20 | import static org.junit.Assert.assertFalse
21 |
22 | class ShellTest {
23 | @Test
24 | void regularUserShell() {
25 | Shell sh = new Shell("/bin/bash -s")
26 |
27 | sh.exec('A=a ; r() { return $1; } ; echo $A ; r `id -u`')
28 |
29 | assertFalse("${sh.script} exited with a zero status (expected non-zero for a regular user)", sh.ret == 0)
30 | assertEquals("got wrong stdout ${sh.out}", "a", sh.out[0])
31 | assertEquals("got extra stderr ${sh.err}", 0, sh.err.size())
32 | }
33 |
34 | @Test
35 | void superUserShell() {
36 | Shell sh = new Shell("/bin/bash -s")
37 |
38 | sh.setUser('root')
39 | sh.exec('r() { return $1; } ; r `id -u`')
40 |
41 | assertEquals("${sh.script} exited with a non-zero status", 0, sh.ret)
42 | assertEquals("got extra stdout ${sh.out}", 0, sh.out.size())
43 | assertEquals("got extra stderr ${sh.err}", 0, sh.err.size())
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/test/src/smokes/README:
--------------------------------------------------------------------------------
1 | This repository contains Hadoop stack tests such as smokes, load, etc.
2 |
3 | Release Notes:
4 |
5 | At the moment the release process is pretty awkward. The following needs to be taken into consideration:
6 |
7 | 1) Preparation needs to have extra arguments passed along:
8 | % mvn release:prepare -Darguments="-DskipTests -DskipITs"
9 | 2) Because two different projects (with separate pom files) are kept in the same repository, you need to run
10 | % mvn release:perform -Darguments="-DskipTests -DskipITs"
11 | and after it fails
12 | % cd target/checkout
13 | % mvn deploy -DperformRelease -DskipTests -DskipITs
14 | At this point the release plugin has prepared everything; it just can't find the proper pom.xml
15 | 3) Longevity isn't a module of smokes, thus manual changes of
16 | artifact versions and amendment of the git commits (initially done by the
17 | release plugin in 1) above) are required. This will be fixed soon. (FIXME)
18 |
--------------------------------------------------------------------------------
/test/src/smokes/flume/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |   <modelVersion>4.0.0</modelVersion>
4 |
5 |   <parent>
6 |     <groupId>com.cloudera.itest</groupId>
7 |     <artifactId>cdh-smokes</artifactId>
8 |     <version>1.0-cdh3u1-SNAPSHOT</version>
9 |   </parent>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>flumesmoke</artifactId>
12 |   <version>0.9.3-cdh3u1-SNAPSHOT</version>
13 |   <name>flumesmoke</name>
14 |
15 |   <dependencies>
16 |     <dependency>
17 |       <groupId>org.apache.hadoop</groupId>
18 |       <artifactId>hadoop-core</artifactId>
19 |       <version>0.20.2-CDH3B4</version>
20 |     </dependency>
21 |   </dependencies>
22 | </project>
23 |
--------------------------------------------------------------------------------
/test/src/smokes/flume/src/test/resources/FlumeSmokeBzip2/flume-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>flume.collector.dfs.compress.codec</name>
7 |     <value>BZip2Codec</value>
8 |   </property>
9 | </configuration>
10 |
--------------------------------------------------------------------------------
/test/src/smokes/flume/src/test/resources/FlumeSmokeDeflate/flume-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>flume.collector.dfs.compress.codec</name>
7 |     <value>DefaultCodec</value>
8 |   </property>
9 | </configuration>
10 |
--------------------------------------------------------------------------------
/test/src/smokes/flume/src/test/resources/FlumeSmokeGzip/flume-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 |
4 | <configuration>
5 |   <property>
6 |     <name>flume.collector.dfs.compress.codec</name>
7 |     <value>GzipCodec</value>
8 |   </property>
9 | </configuration>
10 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/README:
--------------------------------------------------------------------------------
1 | This is a project to develop and build Hadoop (HDFS and MAPREDUCE) smoke and system tests.
2 |
3 | To be able to run this, a starter project is also required, such as
4 | http://github.sf.cloudera.com/cos/iTest/blob/master/metaTest/hadoop.pom
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |
4 |   <parent>
5 |     <groupId>com.cloudera.itest</groupId>
6 |     <artifactId>cdh-smokes</artifactId>
7 |     <version>1.0-cdh3u1-SNAPSHOT</version>
8 |   </parent>
9 |   <modelVersion>4.0.0</modelVersion>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>hadoopsmoke</artifactId>
12 |   <version>0.20.2-cdh3u1-SNAPSHOT</version>
13 |   <name>hadoopsmoke</name>
14 |
15 |   <dependencies>
16 |     <dependency>
17 |       <groupId>org.apache.hadoop</groupId>
18 |       <artifactId>hadoop-core</artifactId>
19 |       <version>0.20.2-cdh3u0</version>
20 |     </dependency>
21 |     <dependency>
22 |       <groupId>org.apache.hadoop</groupId>
23 |       <artifactId>hadoop-test</artifactId>
24 |       <version>0.20.2-cdh3u0</version>
25 |     </dependency>
26 |   </dependencies>
27 | </project>
28 |
29 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/cachedir.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hadoop/src/test/resources/cachedir.jar
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/clitest_data/data120bytes:
--------------------------------------------------------------------------------
1 | 12345678901234
2 | 12345678901234
3 | 12345678901234
4 | 12345678901234
5 | 12345678901234
6 | 12345678901234
7 | 12345678901234
8 | 12345678901234
9 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/clitest_data/data15bytes:
--------------------------------------------------------------------------------
1 | 12345678901234
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/clitest_data/data30bytes:
--------------------------------------------------------------------------------
1 | 12345678901234
2 | 12345678901234
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/clitest_data/data60bytes:
--------------------------------------------------------------------------------
1 | 12345678901234
2 | 12345678901234
3 | 12345678901234
4 | 12345678901234
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/input.txt:
--------------------------------------------------------------------------------
1 | testlink/cache.txt
2 | testlink/cache2.txt
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hadoop/src/test/resources/map.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | xargs cat
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hbase/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |   <modelVersion>4.0.0</modelVersion>
4 |
5 |   <parent>
6 |     <groupId>com.cloudera.itest</groupId>
7 |     <artifactId>cdh-smokes</artifactId>
8 |     <version>1.0-cdh3u1-SNAPSHOT</version>
9 |   </parent>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>hbasesmoke</artifactId>
12 |   <version>0.90.1-cdh3u1-SNAPSHOT</version>
13 |   <name>hbasesmoke</name>
14 | </project>
15 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/README:
--------------------------------------------------------------------------------
1 | This is a project to develop and build Hive smoke and system tests.
2 |
3 | To be able to run this, a starter project is also required, such as
4 | http://github.sf.cloudera.com/cos/iTest/blob/master/metaTest/hive.pom
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |   <modelVersion>4.0.0</modelVersion>
4 |
5 |   <parent>
6 |     <groupId>com.cloudera.itest</groupId>
7 |     <artifactId>cdh-smokes</artifactId>
8 |     <version>1.0-cdh3u1-SNAPSHOT</version>
9 |   </parent>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>hivesmoke</artifactId>
12 |   <version>0.7.0-cdh3u1-SNAPSHOT</version>
13 |   <name>hivesmoke</name>
14 | </project>
15 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/a.txt:
--------------------------------------------------------------------------------
1 | 1foo
2 | 2bar
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/integration/hbase_pushdown/in:
--------------------------------------------------------------------------------
1 | CREATE TABLE hbase_pushdown(key int, value string)
2 | STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
3 | WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string");
4 |
5 | INSERT OVERWRITE TABLE hbase_pushdown
6 | SELECT *
7 | FROM src;
8 |
9 | -- with full pushdown
10 | explain select * from hbase_pushdown where key=90;
11 |
12 | select * from hbase_pushdown where key=90;
13 |
14 | -- with partial pushdown
15 |
16 | explain select * from hbase_pushdown where key=90 and value like '%90%';
17 |
18 | select * from hbase_pushdown where key=90 and value like '%90%';
19 |
20 | -- with two residuals
21 |
22 | explain select * from hbase_pushdown
23 | where key=90 and value like '%90%' and key=cast(value as int);
24 |
25 | -- with contradictory pushdowns
26 |
27 | explain select * from hbase_pushdown
28 | where key=80 and key=90 and value like '%90%';
29 |
30 | select * from hbase_pushdown
31 | where key=80 and key=90 and value like '%90%';
32 |
33 | -- with nothing to push down
34 |
35 | explain select * from hbase_pushdown;
36 |
37 | -- with a predicate which is not actually part of the filter, so
38 | -- it should be ignored by pushdown
39 |
40 | explain select * from hbase_pushdown
41 | where (case when key=90 then 2 else 4 end) > 3;
42 |
43 | -- with a predicate which is under an OR, so it should
44 | -- be ignored by pushdown
45 |
46 | explain select * from hbase_pushdown
47 | where key=80 or value like '%90%';
48 |
49 | set hive.optimize.ppd.storage=false;
50 |
51 | -- with pushdown disabled
52 |
53 | explain select * from hbase_pushdown where key=90;
54 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/integration/hbase_queries/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#hdfs://[^/]*/#HDFS_URL/#' \
2 | -e 's#createTime:[0-9][0-9]*#createTime:NOW#g' \
3 | -e 's#transient_lastDdlTime=[0-9][0-9]*#transient_lastDdlTime=NOW#g' \
4 | -e '/Map Operator Tree:/,/Reduce Output Operator/d' \
5 | -e '/^ (TOK_QUERY/d' \
6 | -e '/Detailed Table Information/s#owner:[^,]*,#owner:OWNER,#' \
7 | -e 's#name: default.hbase_table_3#name: HBASE_TABLE#' \
8 | -e 's#name: hbase_table_3#name: HBASE_TABLE#'
9 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/integration/hbase_stats/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#hdfs://[^/]*/#HDFS_URL/#' \
2 | -e '/^CreateTime:/d' \
3 | -e '/transient_lastDdlTime/d' \
4 | -e 's#^Owner:.*$#Owner: USER#'
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/integration/hbase_stats/in:
--------------------------------------------------------------------------------
1 | set datanucleus.cache.collections=false;
2 |
3 | set hive.stats.dbclass=hbase;
4 |
5 | create table stats_src like src;
6 | insert overwrite table stats_src select * from src;
7 | analyze table stats_src compute statistics;
8 | desc formatted stats_src;
9 |
10 | create table hbase_part like srcpart;
11 |
12 | insert overwrite table hbase_part partition (ds='2010-04-08', hr = '11') select key, value from src;
13 | insert overwrite table hbase_part partition (ds='2010-04-08', hr = '12') select key, value from src;
14 |
15 | analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics;
16 | analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics;
17 |
18 | desc formatted hbase_part;
19 | desc formatted hbase_part partition (ds='2010-04-08', hr = '11');
20 | desc formatted hbase_part partition (ds='2010-04-08', hr = '12');
21 |
22 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/authorization_2/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#^grantTime.[0-9]*#grantTime\tJUSTNOW#' \
2 | -e 's#^grantor.*$#grantor\t\tBORG#'
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/authorization_2/out:
--------------------------------------------------------------------------------
1 | create table authorization_part (key int, value string) partitioned by (ds string)
2 |
3 |
4 | ALTER TABLE authorization_part SET TBLPROPERTIES ("PARTITION_LEVEL_PRIVILEGE"="TRUE")
5 | set hive.security.authorization.enabled=true
6 |
7 |
8 | -- column grant to user
9 | grant Create on table authorization_part to user hive_test_user
10 |
11 | grant Update on table authorization_part to user hive_test_user
12 |
13 | grant Drop on table authorization_part to user hive_test_user
14 |
15 | grant select on table src to user hive_test_user
16 |
17 |
18 | show grant user hive_test_user on table authorization_part
19 |
20 | database default
21 | table authorization_part
22 | principalName hive_test_user
23 | principalType USER
24 | privilege Create
25 | grantTime 1301676873
26 | grantor hudson
27 |
28 | database default
29 | table authorization_part
30 | principalName hive_test_user
31 | principalType USER
32 | privilege Update
33 | grantTime 1301676874
34 | grantor hudson
35 |
36 | database default
37 | table authorization_part
38 | principalName hive_test_user
39 | principalType USER
40 | privilege Drop
41 | grantTime 1301676874
42 | grantor hudson
43 |
44 |
45 | alter table authorization_part add partition (ds='2010')
46 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/auto_join20/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#'
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/auto_join20/in:
--------------------------------------------------------------------------------
1 | set hive.auto.convert.join = true;
2 |
3 | explain
4 | select sum(hash(a.k1,a.v1,a.k2,a.v2,a.k3,a.v3))
5 | from (
6 | SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 , src3.key as k3, src3.value as v3
7 | FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
8 | SORT BY k1,v1,k2,v2,k3,v3
9 | )a;
10 |
11 | select sum(hash(a.k1,a.v1,a.k2,a.v2,a.k3,a.v3))
12 | from (
13 | SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 , src3.key as k3, src3.value as v3
14 | FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
15 | SORT BY k1,v1,k2,v2,k3,v3
16 | )a;
17 |
18 | explain
19 | select sum(hash(a.k1,a.v1,a.k2,a.v2,a.k3,a.v3))
20 | from (
21 | SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 , src3.key as k3, src3.value as v3
22 | FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
23 | SORT BY k1,v1,k2,v2,k3,v3
24 | )a;
25 |
26 | select sum(hash(a.k1,a.v1,a.k2,a.v2,a.k3,a.v3))
27 | from (
28 | SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 , src3.key as k3, src3.value as v3
29 | FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
30 | SORT BY k1,v1,k2,v2,k3,v3
31 | )a;
32 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/basic/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
2 | -e 's#Copying file:.*u.data#Copying file: u.data#' \
3 | -e '/^Deleted.*u_data$/d'
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/basic/in:
--------------------------------------------------------------------------------
1 | DROP TABLE u_data;
2 |
3 | CREATE TABLE u_data (
4 | userid INT,
5 | movieid INT,
6 | rating INT,
7 | unixtime STRING)
8 | ROW FORMAT DELIMITED
9 | FIELDS TERMINATED BY '\t'
10 | STORED AS TEXTFILE;
11 |
12 | LOAD DATA LOCAL INPATH 'seed_data_files/ml-data/u.data'
13 | OVERWRITE INTO TABLE u_data;
14 |
15 | INSERT OVERWRITE DIRECTORY '/tmp/count'
16 | SELECT COUNT(1) FROM u_data;
17 | dfs -cat /tmp/count/* ;
18 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/basic/out:
--------------------------------------------------------------------------------
1 | DROP TABLE u_data
2 |
3 |
4 | CREATE TABLE u_data (
5 | userid INT,
6 | movieid INT,
7 | rating INT,
8 | unixtime STRING)
9 | ROW FORMAT DELIMITED
10 | FIELDS TERMINATED BY '\t'
11 | STORED AS TEXTFILE
12 |
13 |
14 | LOAD DATA LOCAL INPATH 'seed_data_files/ml-data/u.data'
15 | OVERWRITE INTO TABLE u_data
16 | Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/ml-data/u.data
17 | Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/u_data
18 |
19 |
20 | INSERT OVERWRITE DIRECTORY '/tmp/count'
21 | SELECT COUNT(1) FROM u_data
22 | dfs -cat /tmp/count/*
23 | 100000
24 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/bucketizedhiveinputformat/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
2 | -e 's#Copying file:.*/kv..txt#Copying file:kvX.txt#' \
3 | -e '/^Deleted hdfs:/d'
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/bucketizedhiveinputformat/in:
--------------------------------------------------------------------------------
1 | set hive.input.format=org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
2 | set mapred.min.split.size = 64;
3 |
4 | CREATE TABLE T1(name STRING) STORED AS TEXTFILE;
5 |
6 | LOAD DATA LOCAL INPATH 'seed_data_files/kv1.txt' INTO TABLE T1;
7 |
8 | CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE;
9 |
10 | EXPLAIN INSERT OVERWRITE TABLE T2 SELECT * FROM (
11 | SELECT tmp1.name as name FROM (
12 | SELECT name, 'MMM' AS n FROM T1) tmp1
13 | JOIN (SELECT 'MMM' AS n FROM T1) tmp2
14 | JOIN (SELECT 'MMM' AS n FROM T1) tmp3
15 | ON tmp1.n = tmp2.n AND tmp1.n = tmp3.n) ttt LIMIT 5000000;
16 |
17 |
18 | INSERT OVERWRITE TABLE T2 SELECT * FROM (
19 | SELECT tmp1.name as name FROM (
20 | SELECT name, 'MMM' AS n FROM T1) tmp1
21 | JOIN (SELECT 'MMM' AS n FROM T1) tmp2
22 | JOIN (SELECT 'MMM' AS n FROM T1) tmp3
23 | ON tmp1.n = tmp2.n AND tmp1.n = tmp3.n) ttt LIMIT 5000000;
24 |
25 | EXPLAIN SELECT COUNT(1) FROM T2;
26 | SELECT COUNT(1) FROM T2;
27 |
28 | CREATE TABLE T3(name STRING) STORED AS TEXTFILE;
29 | LOAD DATA LOCAL INPATH 'seed_data_files/kv1.txt' INTO TABLE T3;
30 | LOAD DATA LOCAL INPATH 'seed_data_files/kv2.txt' INTO TABLE T3;
31 |
32 | EXPLAIN SELECT COUNT(1) FROM T3;
33 | SELECT COUNT(1) FROM T3;
34 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/bucketmapjoin5/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#g' \
2 | -e 's#Copying file:.*/srcbucket2#Copying file:srcbucket2#' \
3 | -e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
4 | -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#'
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/drop_multi_partitions/in:
--------------------------------------------------------------------------------
1 | create table mp (a string) partitioned by (b string, c string);
2 |
3 | alter table mp add partition (b='1', c='1');
4 | alter table mp add partition (b='1', c='2');
5 | alter table mp add partition (b='2', c='2');
6 |
7 | show partitions mp;
8 |
9 | explain extended alter table mp drop partition (b='1');
10 | alter table mp drop partition (b='1');
11 |
12 | show partitions mp;
13 |
14 | set hive.exec.drop.ignorenonexistent=false;
15 | alter table mp drop if exists partition (b='3');
16 |
17 | show partitions mp;
18 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/drop_multi_partitions/out:
--------------------------------------------------------------------------------
1 | create table mp (a string) partitioned by (b string, c string)
2 |
3 |
4 | alter table mp add partition (b='1', c='1')
5 |
6 | alter table mp add partition (b='1', c='2')
7 |
8 | alter table mp add partition (b='2', c='2')
9 |
10 |
11 | show partitions mp
12 | b=1/c=1
13 | b=1/c=2
14 | b=2/c=2
15 |
16 |
17 | explain extended alter table mp drop partition (b='1')
18 | ABSTRACT SYNTAX TREE:
19 | (TOK_ALTERTABLE_DROPPARTS mp (TOK_PARTSPEC (TOK_PARTVAL b '1')))
20 |
21 | STAGE DEPENDENCIES:
22 | Stage-0 is a root stage
23 |
24 | STAGE PLANS:
25 | Stage: Stage-0
26 | Drop Table Operator:
27 | Drop Table
28 | table: mp
29 |
30 |
31 |
32 | alter table mp drop partition (b='1')
33 |
34 |
35 | show partitions mp
36 | b=2/c=2
37 | set hive.exec.drop.ignorenonexistent=false
38 |
39 | alter table mp drop if exists partition (b='3')
40 |
41 |
42 | show partitions mp
43 | b=2/c=2
44 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
2 | -e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
3 | -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#'
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/groupby_map_ppr_multi_distinct/in:
--------------------------------------------------------------------------------
1 | set hive.map.aggr=true;
2 | set hive.groupby.skewindata=false;
3 | set mapred.reduce.tasks=31;
4 |
5 | CREATE TABLE dest1(key STRING, c1 INT, c2 STRING, C3 INT, c4 INT) STORED AS TEXTFILE;
6 |
7 | EXPLAIN EXTENDED
8 | FROM srcpart src
9 | INSERT OVERWRITE TABLE dest1
10 | SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(DISTINCT src.value)
11 | WHERE src.ds = '2008-04-08'
12 | GROUP BY substr(src.key,1,1);
13 |
14 | FROM srcpart src
15 | INSERT OVERWRITE TABLE dest1
16 | SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(DISTINCT src.value)
17 | WHERE src.ds = '2008-04-08'
18 | GROUP BY substr(src.key,1,1);
19 |
20 | SELECT dest1.* FROM dest1;
21 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/index_creation/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' |
2 | sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
3 | -e 's#owner:[^,]*,#owner:BORG,#' \
4 | -e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
5 | -e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
6 | -e 's#transient_lastDdlTime=[0-9]*}#transient_lastDdlTime=JUSTNOW}#'
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/join19/in:
--------------------------------------------------------------------------------
1 | CREATE TABLE triples (foo string, subject string, predicate string, object string, foo2 string);
2 |
3 | EXPLAIN
4 | SELECT t11.subject, t22.object , t33.subject , t55.object, t66.object
5 | FROM
6 | (
7 | SELECT t1.subject
8 | FROM triples t1
9 | WHERE
10 | t1.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL'
11 | AND
12 | t1.object='http://ontos/OntosMiner/Common.English/ontology#Citation'
13 | ) t11
14 | JOIN
15 | (
16 | SELECT t2.subject , t2.object
17 | FROM triples t2
18 | WHERE
19 | t2.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL'
20 | ) t22
21 | ON (t11.subject=t22.subject)
22 | JOIN
23 | (
24 | SELECT t3.subject , t3.object
25 | FROM triples t3
26 | WHERE
27 | t3.predicate='http://www.ontosearch.com/2007/12/ontosofa-ns#_from'
28 |
29 | ) t33
30 | ON (t11.subject=t33.object)
31 | JOIN
32 | (
33 | SELECT t4.subject
34 | FROM triples t4
35 | WHERE
36 | t4.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL'
37 | AND
38 | t4.object='http://ontos/OntosMiner/Common.English/ontology#Author'
39 |
40 | ) t44
41 | ON (t44.subject=t33.subject)
42 | JOIN
43 | (
44 | SELECT t5.subject, t5.object
45 | FROM triples t5
46 | WHERE
47 | t5.predicate='http://www.ontosearch.com/2007/12/ontosofa-ns#_to'
48 | ) t55
49 | ON (t55.subject=t44.subject)
50 | JOIN
51 | (
52 | SELECT t6.subject, t6.object
53 | FROM triples t6
54 | WHERE
55 | t6.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL'
56 | ) t66
57 | ON (t66.subject=t55.object);
58 |
59 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/join_filters/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#Copying file:.*/in..txt#Copying file:inX.txt#'
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/load_dyn_part14/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' |
2 | sed -e 's#owner:[^,]*,#owner:BORG,#' \
3 | -e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
4 | -e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
5 | -e 's#{transient_lastDdlTime=[0-9]*}#{transient_lastDdlTime=JUSTNOW}#'
6 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/load_dyn_part14/in:
--------------------------------------------------------------------------------
1 |
2 | create table if not exists nzhang_part14 (key string)
3 | partitioned by (value string);
4 |
5 | describe extended nzhang_part14;
6 |
7 | set hive.exec.dynamic.partition=true;
8 | set hive.exec.dynamic.partition.mode=nonstrict;
9 |
10 | explain
11 | insert overwrite table nzhang_part14 partition(value)
12 | select key, value from (
13 | select 'k1' as key, cast(null as string) as value from src limit 2
14 | union all
15 | select 'k2' as key, '' as value from src limit 2
16 | union all
17 | select 'k3' as key, ' ' as value from src limit 2
18 | ) T;
19 |
20 | insert overwrite table nzhang_part14 partition(value)
21 | select key, value from (
22 | select 'k1' as key, cast(null as string) as value from src limit 2
23 | union all
24 | select 'k2' as key, '' as value from src limit 2
25 | union all
26 | select 'k3' as key, ' ' as value from src limit 2
27 | ) T;
28 |
29 |
30 | show partitions nzhang_part14;
31 |
32 | select * from nzhang_part14 where value <> 'a'
33 | order by key, value;
34 |
35 |
36 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/merge_dynamic_partition/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#Copying file:.*/srcbucket#Copying file:srcbucket#' \
2 | -e 's#^owner:.*$#owner:BORG#' \
3 | -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
4 | -e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
5 | -e 's#last(Access|Update)Time:[0-9]*#last\1Time:JUSTNOW#'
6 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/multi_insert/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
2 | -e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
3 | -e 's#Deleted hdfs://.*src_multi.$#Deleted hdfs://src_multi#'
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/rcfile_columnar/filter:
--------------------------------------------------------------------------------
1 | sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#'
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/rcfile_columnar/in:
--------------------------------------------------------------------------------
1 |
2 | CREATE table columnTable (key STRING, value STRING)
3 | ROW FORMAT SERDE
4 | 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
5 | STORED AS
6 | INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
7 | OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
8 |
9 | FROM src
10 | INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10;
11 | describe columnTable;
12 |
13 | SELECT columnTable.* FROM columnTable ORDER BY columnTable.key;
14 |
15 |
16 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/rcfile_columnar/out:
--------------------------------------------------------------------------------
1 |
2 | CREATE table columnTable (key STRING, value STRING)
3 | ROW FORMAT SERDE
4 | 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
5 | STORED AS
6 | INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
7 | OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
8 |
9 |
10 | FROM src
11 | INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10
12 | Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/columntable
13 |
14 | describe columnTable
15 | key string from deserializer
16 | value string from deserializer
17 |
18 |
19 | SELECT columnTable.* FROM columnTable ORDER BY columnTable.key
20 | 165 val_165
21 | 238 val_238
22 | 255 val_255
23 | 27 val_27
24 | 278 val_278
25 | 311 val_311
26 | 409 val_409
27 | 484 val_484
28 | 86 val_86
29 | 98 val_98
30 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/stats8/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' |
2 | sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
3 | -e 's#owner:[^,]*,#owner:BORG,#' \
4 | -e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
5 | -e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
6 | -e 's#transient_lastDdlTime=[0-9]*#transient_lastDdlTime=JUSTNOW#'
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/stats8/in:
--------------------------------------------------------------------------------
1 | set datanucleus.cache.collections=false;
2 | set hive.stats.autogather=false;
3 | set hive.exec.dynamic.partition=true;
4 | set hive.exec.dynamic.partition.mode=nonstrict;
5 |
6 | create table analyze_srcpart like srcpart;
7 | insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null;
8 |
9 | explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
10 | analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
11 | describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
12 | describe extended analyze_srcpart;
13 |
14 | explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
15 | analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
16 | describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
17 |
18 | explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
19 | analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
20 | describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
21 |
22 | explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
23 | analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
24 | describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
25 |
26 | explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
27 | analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
28 |
29 | describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
30 | describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
31 | describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
32 | describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
33 | describe extended analyze_srcpart;
34 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/union3/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
2 | -e '/^Deleted hdfs/d'
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/union3/in:
--------------------------------------------------------------------------------
1 |
2 | explain
3 | SELECT *
4 | FROM (
5 | SELECT 1 AS id
6 | FROM (SELECT * FROM src LIMIT 1) s1
7 | CLUSTER BY id
8 | UNION ALL
9 | SELECT 2 AS id
10 | FROM (SELECT * FROM src LIMIT 1) s1
11 | CLUSTER BY id
12 | UNION ALL
13 | SELECT 3 AS id
14 | FROM (SELECT * FROM src LIMIT 1) s2
15 | UNION ALL
16 | SELECT 4 AS id
17 | FROM (SELECT * FROM src LIMIT 1) s2
18 | ) a;
19 |
20 |
21 |
22 | CREATE TABLE union_out (id int);
23 |
24 | insert overwrite table union_out
25 | SELECT *
26 | FROM (
27 | SELECT 1 AS id
28 | FROM (SELECT * FROM src LIMIT 1) s1
29 | CLUSTER BY id
30 | UNION ALL
31 | SELECT 2 AS id
32 | FROM (SELECT * FROM src LIMIT 1) s1
33 | CLUSTER BY id
34 | UNION ALL
35 | SELECT 3 AS id
36 | FROM (SELECT * FROM src LIMIT 1) s2
37 | UNION ALL
38 | SELECT 4 AS id
39 | FROM (SELECT * FROM src LIMIT 1) s2
40 | ) a;
41 |
42 | select * from union_out cluster by id;
43 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/uniquejoin/filter:
--------------------------------------------------------------------------------
1 | sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
2 | -e 's#Copying file:.*/T..txt#Copying file:TX.txt#' \
3 | -e '/^Deleted hdfs:/d'
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/scripts/ql/uniquejoin/in:
--------------------------------------------------------------------------------
1 | CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
2 | CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
3 | CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
4 |
5 | LOAD DATA LOCAL INPATH 'seed_data_files/T1.txt' INTO TABLE T1;
6 | LOAD DATA LOCAL INPATH 'seed_data_files/T2.txt' INTO TABLE T2;
7 | LOAD DATA LOCAL INPATH 'seed_data_files/T3.txt' INTO TABLE T3;
8 |
9 | FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
10 | SELECT a.key, b.key, c.key;
11 |
12 | FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key), T3 c (c.key)
13 | SELECT a.key, b.key, c.key;
14 |
15 | FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key-1), T3 c (c.key)
16 | SELECT a.key, b.key, c.key;
17 |
18 | FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val)
19 | SELECT a.key, a.val, b.key, b.val, c.key, c.val;
20 |
21 | FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key), PRESERVE T3 c (c.key)
22 | SELECT a.key, b.key, c.key;
23 |
24 | FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b(b.key)
25 | SELECT a.key, b.key;
26 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/T1.txt:
--------------------------------------------------------------------------------
1 | 111
2 | 212
3 | 313
4 | 717
5 | 818
6 | 828
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/T2.txt:
--------------------------------------------------------------------------------
1 | 222
2 | 313
3 | 414
4 | 515
5 | 818
6 | 818
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/T3.txt:
--------------------------------------------------------------------------------
1 | 212
2 | 414
3 | 616
4 | 717
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/apache.access.2.log:
--------------------------------------------------------------------------------
1 | 127.0.0.1 - - [26/May/2009:00:00:00 +0000] "GET /someurl/?track=Blabla(Main) HTTP/1.1" 200 5864 - "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19"
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/apache.access.log:
--------------------------------------------------------------------------------
1 | 127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/complex.seq:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/complex.seq
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/covar_tab.txt:
--------------------------------------------------------------------------------
1 | 1 15
2 | 2 3
3 | 3 7 12
4 | 4 4 14
5 | 5 8 17
6 | 6 2 11
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/create_nested_type.txt:
--------------------------------------------------------------------------------
1 | a0b00b01c001C001c002C002c011\Nc012C012d01d011d012d02d021d022
2 | a1b10c001C001c002C002d01d011d012d02\N
3 | a2c001\Nc002C002c011C011c012C012d01\Nd012d02d021d022
4 | a3\N\N\N
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/datatypes.txt:
--------------------------------------------------------------------------------
1 | \N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
2 | -1false-1.1\N\N\N-1-1-1.0-1\N\N
3 | 1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd2
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/docurl.txt:
--------------------------------------------------------------------------------
1 | bonework Chad bullfrog almighty blubberers cynodictis boilersmith cosmopolitical corrie autoincrements
2 | casings choked colpohysterotomy comedist cradleman annexa agronomical archmockery Cocles adhaka
3 | daekon link anticrochet auricular cheeked Arbon alder-leaved
4 | darlingness breamed company carbureted comediette condensery link
5 | daekon link anticrochet auricular cheeked Arbon alder-leaved
6 | darlingness breamed company carbureted comediette condensery link
7 | daekon link anticrochet auricular cheeked Arbon alder-leaved darlingness breamed company carbureted comediette condensery link
8 | daekon link anticrochet auricular cheeked Arbon alder-leaved darlingness breamed company carbureted comediette condensery link
9 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/empty1.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/empty1.txt
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/empty2.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/empty2.txt
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/hive_626_bar.txt:
--------------------------------------------------------------------------------
1 | 10,0,1,1,bar10,a,b,c,d
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/hive_626_count.txt:
--------------------------------------------------------------------------------
1 | 10,2
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/hive_626_foo.txt:
--------------------------------------------------------------------------------
1 | 1,foo1,a,b,c,d
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in1.txt:
--------------------------------------------------------------------------------
1 | 35
2 | 48
3 | 100100
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in2.txt:
--------------------------------------------------------------------------------
1 | 135
2 | 148
3 | 200200
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in3.txt:
--------------------------------------------------------------------------------
1 | 1235
2 | 40
3 | 48
4 | 100100
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in4.txt:
--------------------------------------------------------------------------------
1 | 35236
2 | 101000501
3 | 100100103
4 | 12802
5 | 101005
6 | 10100454
7 | 12100757
8 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in5.txt:
--------------------------------------------------------------------------------
1 | 51000566
2 | 151001566
3 | 201002066
4 | 251002588
5 | 301003066
6 | 351003588
7 | 401004066
8 | 401004088
9 | 501005088
10 | 501005066
11 | 501005088
12 | 601004066
13 | 601004066
14 | 701004066
15 | 701004066
16 | 801004088
17 | 801004088
18 | 1005066
19 | 66
20 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/in6.txt:
--------------------------------------------------------------------------------
1 | 51000566
2 | 101001066
3 | 201002066
4 | 251002566
5 | 301003088
6 | 351003588
7 | 401004066
8 | 401004088
9 | 501005066
10 | 501005088
11 | 501005066
12 | 601004066
13 | 601004066
14 | 701004088
15 | 701004088
16 | 801004066
17 | 801004066
18 | 1005066
19 | 66
20 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/json.txt:
--------------------------------------------------------------------------------
1 | {"store":{"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"book":[{"author":"Nigel Rees","title":"Sayings of the Century","category":"reference","price":8.95},{"author":"Herman Melville","title":"Moby Dick","category":"fiction","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","title":"The Lord of the Rings","category":"fiction","reader":[{"age":25,"name":"bob"},{"age":26,"name":"jack"}],"price":22.99,"isbn":"0-395-19395-8"}],"bicycle":{"price":19.95,"color":"red"}},"email":"amy@only_for_json_udf_test.net","owner":"amy"}
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv1.seq:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/kv1.seq
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv1_broken.seq:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/kv1_broken.seq
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv3.txt:
--------------------------------------------------------------------------------
1 | 238val_238
2 |
3 | 311val_311
4 | val_27
5 | val_165
6 | val_409
7 | 255val_255
8 | 278val_278
9 | 98val_98
10 | val_484
11 | val_265
12 | val_193
13 | 401val_401
14 | 150val_150
15 | 273val_273
16 | 224
17 | 369
18 | 66val_66
19 | 128
20 | 213val_213
21 | 146val_146
22 | 406val_406
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv4.txt:
--------------------------------------------------------------------------------
1 | 邵铮
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv5.txt:
--------------------------------------------------------------------------------
1 | 238val_238
2 | 238val_239
3 | 86val_86
4 | 238val_240
5 | 311val_311
6 | 27val_27
7 | 165val_165
8 | 213val_213
9 | 409val_409
10 | 255val_255
11 | 278val_278
12 | 98val_98
13 | 484val_484
14 | 265val_265
15 | 213val_214
16 | 193val_193
17 | 401val_401
18 | 150val_150
19 | 273val_273
20 | 224val_224
21 | 369val_369
22 | 66val_66
23 | 128val_128
24 | 213val_213
25 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/kv6.txt:
--------------------------------------------------------------------------------
1 | 00
2 | 01
3 | 02
4 | 03
5 | 04
6 | 05
7 | 06
8 | 07
9 | 08
10 | 09
11 | 010
12 | 011
13 | 012
14 | 013
15 | 014
16 | 015
17 | 016
18 | 017
19 | 018
20 | 019
21 | 020
22 | 021
23 | 022
24 | 023
25 | 024
26 | 025
27 | 026
28 | 027
29 | 028
30 | 029
31 | 030
32 | 031
33 | 032
34 | 033
35 | 034
36 | 035
37 | 036
38 | 037
39 | 038
40 | 039
41 | 040
42 | 041
43 | 042
44 | 043
45 | 044
46 | 045
47 | 046
48 | 047
49 | 048
50 | 049
51 | 10
52 | 11
53 | 12
54 | 13
55 | 14
56 | 15
57 | 16
58 | 17
59 | 18
60 | 19
61 | 110
62 | 111
63 | 112
64 | 113
65 | 114
66 | 115
67 | 116
68 | 117
69 | 118
70 | 119
71 | 120
72 | 121
73 | 122
74 | 123
75 | 124
76 | 125
77 | 126
78 | 127
79 | 128
80 | 129
81 | 130
82 | 131
83 | 132
84 | 133
85 | 134
86 | 135
87 | 136
88 | 137
89 | 138
90 | 139
91 | 140
92 | 141
93 | 142
94 | 143
95 | 144
96 | 145
97 | 146
98 | 147
99 | 148
100 | 149
101 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/lt100.sorted.txt:
--------------------------------------------------------------------------------
1 | 0val_0
2 | 0val_0
3 | 0val_0
4 | 10val_10
5 | 11val_11
6 | 12val_12
7 | 12val_12
8 | 15val_15
9 | 15val_15
10 | 17val_17
11 | 18val_18
12 | 18val_18
13 | 19val_19
14 | 2val_2
15 | 20val_20
16 | 24val_24
17 | 24val_24
18 | 26val_26
19 | 26val_26
20 | 27val_27
21 | 28val_28
22 | 30val_30
23 | 33val_33
24 | 34val_34
25 | 35val_35
26 | 35val_35
27 | 35val_35
28 | 37val_37
29 | 37val_37
30 | 4val_4
31 | 41val_41
32 | 42val_42
33 | 42val_42
34 | 43val_43
35 | 44val_44
36 | 47val_47
37 | 5val_5
38 | 5val_5
39 | 5val_5
40 | 51val_51
41 | 51val_51
42 | 53val_53
43 | 54val_54
44 | 57val_57
45 | 58val_58
46 | 58val_58
47 | 64val_64
48 | 65val_65
49 | 66val_66
50 | 67val_67
51 | 67val_67
52 | 69val_69
53 | 70val_70
54 | 70val_70
55 | 70val_70
56 | 72val_72
57 | 72val_72
58 | 74val_74
59 | 76val_76
60 | 76val_76
61 | 77val_77
62 | 78val_78
63 | 8val_8
64 | 80val_80
65 | 82val_82
66 | 83val_83
67 | 83val_83
68 | 84val_84
69 | 84val_84
70 | 85val_85
71 | 86val_86
72 | 87val_87
73 | 9val_9
74 | 90val_90
75 | 90val_90
76 | 90val_90
77 | 92val_92
78 | 95val_95
79 | 95val_95
80 | 96val_96
81 | 97val_97
82 | 97val_97
83 | 98val_98
84 | 98val_98
85 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/lt100.txt:
--------------------------------------------------------------------------------
1 | 86val_86
2 | 27val_27
3 | 98val_98
4 | 66val_66
5 | 37val_37
6 | 15val_15
7 | 82val_82
8 | 17val_17
9 | 0val_0
10 | 57val_57
11 | 20val_20
12 | 92val_92
13 | 47val_47
14 | 72val_72
15 | 4val_4
16 | 35val_35
17 | 54val_54
18 | 51val_51
19 | 65val_65
20 | 83val_83
21 | 12val_12
22 | 67val_67
23 | 84val_84
24 | 58val_58
25 | 8val_8
26 | 24val_24
27 | 42val_42
28 | 0val_0
29 | 96val_96
30 | 26val_26
31 | 51val_51
32 | 43val_43
33 | 95val_95
34 | 98val_98
35 | 85val_85
36 | 77val_77
37 | 0val_0
38 | 87val_87
39 | 15val_15
40 | 72val_72
41 | 90val_90
42 | 19val_19
43 | 10val_10
44 | 5val_5
45 | 58val_58
46 | 35val_35
47 | 95val_95
48 | 11val_11
49 | 34val_34
50 | 42val_42
51 | 78val_78
52 | 76val_76
53 | 41val_41
54 | 30val_30
55 | 64val_64
56 | 76val_76
57 | 74val_74
58 | 69val_69
59 | 33val_33
60 | 70val_70
61 | 5val_5
62 | 2val_2
63 | 35val_35
64 | 80val_80
65 | 44val_44
66 | 53val_53
67 | 90val_90
68 | 12val_12
69 | 5val_5
70 | 70val_70
71 | 24val_24
72 | 70val_70
73 | 83val_83
74 | 26val_26
75 | 67val_67
76 | 18val_18
77 | 9val_9
78 | 18val_18
79 | 97val_97
80 | 84val_84
81 | 28val_28
82 | 37val_37
83 | 90val_90
84 | 97val_97
85 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/lt100.txt.deflate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/lt100.txt.deflate
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/allbut.pl:
--------------------------------------------------------------------------------
1 | #!/usr/local/bin/perl
2 |
3 | # get args
4 | if (@ARGV < 4) {
5 | print STDERR "Usage: $0 base_name start stop max_test [ratings ...]\n";
6 | exit 1;
7 | }
8 | $basename = shift;
9 | $start = shift;
10 | $stop = shift;
11 | $maxtest = shift;
12 |
13 | # open files
14 | open( TESTFILE, ">$basename.test" ) or die "Cannot open $basename.test for writing\n";
15 | open( BASEFILE, ">$basename.base" ) or die "Cannot open $basename.base for writing\n";
16 |
17 | # init variables
18 | $testcnt = 0;
19 |
20 | while (<>) {
21 | ($user) = split;
22 | if (! defined $ratingcnt{$user}) {
23 | $ratingcnt{$user} = 0;
24 | }
25 | ++$ratingcnt{$user};
26 | if (($testcnt < $maxtest || $maxtest <= 0)
27 | && $ratingcnt{$user} >= $start && $ratingcnt{$user} <= $stop) {
28 | ++$testcnt;
29 | print TESTFILE;
30 | }
31 | else {
32 | print BASEFILE;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/mku.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | trap 'rm -f tmp.$$; exit 1' 1 2 15
4 |
5 | for i in 1 2 3 4 5
6 | do
7 | head -`expr $i \* 20000` u.data | tail -20000 > tmp.$$
8 | sort -t" " -k 1,1n -k 2,2n tmp.$$ > u$i.test
9 | head -`expr \( $i - 1 \) \* 20000` u.data > tmp.$$
10 | tail -`expr \( 5 - $i \) \* 20000` u.data >> tmp.$$
11 | sort -t" " -k 1,1n -k 2,2n tmp.$$ > u$i.base
12 | done
13 |
14 | allbut.pl ua 1 10 100000 u.data
15 | sort -t" " -k 1,1n -k 2,2n ua.base > tmp.$$
16 | mv tmp.$$ ua.base
17 | sort -t" " -k 1,1n -k 2,2n ua.test > tmp.$$
18 | mv tmp.$$ ua.test
19 |
20 | allbut.pl ub 11 20 100000 u.data
21 | sort -t" " -k 1,1n -k 2,2n ub.base > tmp.$$
22 | mv tmp.$$ ub.base
23 | sort -t" " -k 1,1n -k 2,2n ub.test > tmp.$$
24 | mv tmp.$$ ub.test
25 |
26 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/u.genre:
--------------------------------------------------------------------------------
1 | unknown|0
2 | Action|1
3 | Adventure|2
4 | Animation|3
5 | Children's|4
6 | Comedy|5
7 | Crime|6
8 | Documentary|7
9 | Drama|8
10 | Fantasy|9
11 | Film-Noir|10
12 | Horror|11
13 | Musical|12
14 | Mystery|13
15 | Romance|14
16 | Sci-Fi|15
17 | Thriller|16
18 | War|17
19 | Western|18
20 |
21 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/u.info:
--------------------------------------------------------------------------------
1 | 943 users
2 | 1682 items
3 | 100000 ratings
4 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/u.item:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/u.item
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/ml-data/u.occupation:
--------------------------------------------------------------------------------
1 | administrator
2 | artist
3 | doctor
4 | educator
5 | engineer
6 | entertainment
7 | executive
8 | healthcare
9 | homemaker
10 | lawyer
11 | librarian
12 | marketing
13 | none
14 | other
15 | programmer
16 | retired
17 | salesman
18 | scientist
19 | student
20 | technician
21 | writer
22 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/null.txt:
--------------------------------------------------------------------------------
1 | 1.01same0
2 | 1.01same1
3 | 1.01same2
4 | 1.01same3
5 | 1.01same4
6 | \N1same5
7 | \N\Nsame6
8 | 1.0\Nsame7
9 | 1.01same8
10 | 1.01same9
11 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/nullfile.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/nullfile.txt
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smb_bucket_input.rc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/smb_bucket_input.rc
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smb_bucket_input.txt:
--------------------------------------------------------------------------------
1 | 4val_356
2 | 484val_169
3 | 1000val_1000
4 | 2000val_169
5 | 3000val_169
6 | 4000val_125
7 | 5000val_125
8 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_1.rc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_1.rc
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_1.txt:
--------------------------------------------------------------------------------
1 | 1val_1
2 | 3val_3
3 | 4val_4
4 | 5val_5
5 | 10val_10
6 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_2.rc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_2.rc
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_2.txt:
--------------------------------------------------------------------------------
1 | 20val_20
2 | 23val_23
3 | 25val_25
4 | 30val_30
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_3.rc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudera/bigtop/ccb46e3cd57dc0f1535146d96567319823754bd8/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_3.rc
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/smbbucket_3.txt:
--------------------------------------------------------------------------------
1 | 4val_4
2 | 10val_10
3 | 17val_17
4 | 19val_19
5 | 20val_20
6 | 23val_23
7 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/source.txt:
--------------------------------------------------------------------------------
1 | EXPLAIN
2 | SELECT x.* FROM SRC x;
3 |
4 | SELECT x.* FROM SRC x;
5 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/string.txt:
--------------------------------------------------------------------------------
1 | a bc 1
2 | test 2
3 | test 3
4 | test test 4
5 | testtest 5
6 | test test 6
7 | 7
8 | 8
9 | 9
10 | 10
11 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/symlink1.txt:
--------------------------------------------------------------------------------
1 | ../data/files/T1.txt
2 | ../data/files/T3.txt
3 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/symlink2.txt:
--------------------------------------------------------------------------------
1 | ../data/files/T2.txt
2 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/test.dat:
--------------------------------------------------------------------------------
1 | 1
2 | 2
3 | 3
4 | 4
5 | 5
6 | 6
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/seed_data_files/union_input.txt:
--------------------------------------------------------------------------------
1 | 011oneone
2 | 12.02twotwo
3 | 2threefour3threefour
4 | 35five5fivefive
5 | 2sixseven6sixseven
6 | 38eight8eighteight
7 | 099ninenine
8 | 110.010tenten
9 |
--------------------------------------------------------------------------------
/test/src/smokes/hive/src/test/resources/test.hql:
--------------------------------------------------------------------------------
1 | DROP TABLE u_data;
2 |
3 | CREATE TABLE u_data (
4 | userid INT,
5 | movieid INT,
6 | rating INT,
7 | unixtime STRING)
8 | ROW FORMAT DELIMITED
9 | FIELDS TERMINATED BY '\t'
10 | STORED AS TEXTFILE;
11 |
12 | LOAD DATA LOCAL INPATH 'ml-data/u.data'
13 | OVERWRITE INTO TABLE u_data;
14 |
15 | INSERT OVERWRITE DIRECTORY '/tmp/count'
16 | SELECT COUNT(1) FROM u_data;
17 |
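Like ql/basic/in above, this script loads the MovieLens ratings and writes a single row count into the HDFS directory /tmp/count, but it does not cat the result itself. A manual check after running it might look like the following (a sketch, assuming the Hive CLI and an HDFS client are on the PATH and that the relative path ml-data/u.data resolves from the working directory):

    # illustrative check only; paths and environment are assumptions
    hive -f test.hql
    hadoop fs -cat /tmp/count/*    # the ml-data set has 100000 ratings, so this should print 100000
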
--------------------------------------------------------------------------------
/test/src/smokes/oozie/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |   <modelVersion>4.0.0</modelVersion>
4 |
5 |   <parent>
6 |     <groupId>com.cloudera.itest</groupId>
7 |     <artifactId>cdh-smokes</artifactId>
8 |     <version>1.0-cdh3u1-SNAPSHOT</version>
9 |   </parent>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>ooziesmoke</artifactId>
12 |   <version>2.3.0-cdh3u1-SNAPSHOT</version>
13 |   <name>ooziesmoke</name>
14 |
15 |   <dependencies>
16 |     <dependency>
17 |       <groupId>org.apache.hadoop</groupId>
18 |       <artifactId>hadoop-core</artifactId>
19 |       <version>0.20.2-cdh3u0</version>
20 |     </dependency>
21 |   </dependencies>
22 | </project>
23 |
--------------------------------------------------------------------------------
/test/src/smokes/package/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0">
3 |
4 |   <parent>
5 |     <groupId>com.cloudera.itest</groupId>
6 |     <artifactId>cdh-smokes</artifactId>
7 |     <version>1.0-cdh3u1-SNAPSHOT</version>
8 |   </parent>
9 |   <modelVersion>4.0.0</modelVersion>
10 |   <groupId>com.cloudera.itest</groupId>
11 |   <artifactId>packagesmoke</artifactId>
12 |   <version>1.0-cdh3u1-SNAPSHOT</version>
13 |   <name>packagesmoke</name>
14 |
15 |   <dependencies>
16 |     <dependency>
17 |       <groupId>junit</groupId>
18 |       <artifactId>junit</artifactId>
19 |       <version>4.8.2</version>
20 |       <scope>compile</scope>
21 |     </dependency>
22 |     <dependency>
23 |       <groupId>org.apache.ant</groupId>
24 |       <artifactId>ant-launcher</artifactId>
25 |       <version>1.8.2</version>
26 |     </dependency>
27 |     <dependency>
28 |       <groupId>org.apache.ant</groupId>
29 |       <artifactId>ant-junit</artifactId>
30 |       <version>1.8.2</version>
31 |     </dependency>
32 |     <dependency>
33 |       <groupId>com.cloudera.itest</groupId>
34 |       <artifactId>hadoopsmoke</artifactId>
35 |       <version>0.20.2-cdh3u1-SNAPSHOT</version>
36 |       <type>test-jar</type>
37 |     </dependency>
38 |   </dependencies>
39 | </project>
40 |
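The packagesmoke POM follows the same shape as the other smoke modules: it inherits its coordinates from the cdh-smokes parent and pulls in JUnit, the Ant launcher and ant-junit runner, and the hadoopsmoke test-jar. Under the usual Maven layout, a single module like this can be built and exercised on its own (a sketch under the assumption that the cdh-smokes parent POM sits one directory up and is resolvable):

    # illustrative only; assumes the parent POM at test/src/smokes/pom.xml is available
    cd test/src/smokes/package
    mvn test    # compiles the module against the parent's settings and runs its JUnit tests
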
--------------------------------------------------------------------------------
/test/src/smokes/package/src/test/groovy/com/cloudera/itest/packagesmoke/PackageTestErrorProxy.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 | package com.cloudera.itest.packagesmoke;
19 |
20 | import org.hamcrest.Matcher;
21 | import org.junit.rules.ErrorCollector;
22 | import java.util.concurrent.Callable;
23 | import static org.junit.Assert.assertThat;
24 |
25 | public class PackageTestErrorProxy {
26 | static public void checkThat(ErrorCollector ec, final String msg, final Object value, final Matcher