├── .gitignore ├── README.md ├── code ├── Flink │ ├── flink-basis-java │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ └── StreamingJob.java │ │ │ └── resources │ │ │ └── log4j.properties │ ├── flink-basis-scala │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── resources │ │ │ ├── log4j.properties │ │ │ └── wordcount.txt │ │ │ └── scala │ │ │ └── com │ │ │ └── heibaiying │ │ │ ├── WordCountBatch.scala │ │ │ └── WordCountStreaming.scala │ ├── flink-kafka-integration │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ ├── CustomSinkJob.java │ │ │ │ ├── KafkaStreamingJob.java │ │ │ │ ├── bean │ │ │ │ └── Employee.java │ │ │ │ └── sink │ │ │ │ └── FlinkToMySQLSink.java │ │ │ └── resources │ │ │ └── log4j.properties │ └── flink-state-management │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── heibaiying │ │ │ ├── keyedstate │ │ │ ├── KeyedStateJob.java │ │ │ ├── ThresholdWarning.java │ │ │ └── ThresholdWarningWithTTL.java │ │ │ └── operatorstate │ │ │ ├── OperatorStateJob.java │ │ │ └── ThresholdWarning.java │ │ └── resources │ │ └── log4j.properties ├── Hadoop │ ├── hadoop-word-count │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ ├── WordCountApp.java │ │ │ │ ├── WordCountCombinerApp.java │ │ │ │ ├── WordCountCombinerPartitionerApp.java │ │ │ │ ├── component │ │ │ │ ├── CustomPartitioner.java │ │ │ │ ├── WordCountMapper.java │ │ │ │ └── WordCountReducer.java │ │ │ │ └── utils │ │ │ │ └── WordCountDataUtils.java │ │ │ └── resources │ │ │ └── log4j.properties │ └── hdfs-java-api │ │ ├── pom.xml │ │ └── src │ │ ├── main │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ └── utils │ │ │ └── HdfsUtils.java │ │ └── test │ │ └── java │ │ └── HdfsTest.java ├── Hbase │ ├── hbase-java-api-1.x │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ └── HBaseUtils.java │ │ │ └── test │ │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ └── HbaseUtilsTest.java │ ├── hbase-java-api-2.x │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ └── HBaseUtils.java │ │ │ └── test │ │ │ └── java │ │ │ └── heibaiying │ │ │ └── HBaseUtilsTest.java │ └── hbase-observer-coprocessor │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── heibaiying │ │ └── AppendRegionObserver.java ├── Kafka │ └── kafka-basis │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── heibaiying │ │ ├── consumers │ │ ├── ConsumerASyn.java │ │ ├── ConsumerASynAndSyn.java │ │ ├── ConsumerASynWithOffsets.java │ │ ├── ConsumerExit.java │ │ ├── ConsumerGroup.java │ │ ├── ConsumerSyn.java │ │ ├── RebalanceListener.java │ │ └── StandaloneConsumer.java │ │ └── producers │ │ ├── ProducerASyn.java │ │ ├── ProducerSyn.java │ │ ├── ProducerWithPartitioner.java │ │ ├── SimpleProducer.java │ │ └── partitioners │ │ └── CustomPartitioner.java ├── Phoenix │ ├── spring-boot-mybatis-phoenix │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ ├── java │ │ │ │ └── com │ │ │ │ │ └── heibaiying │ │ │ │ │ └── springboot │ │ │ │ │ ├── SpringBootMybatisApplication.java │ │ │ │ │ ├── bean │ │ │ │ │ └── USPopulation.java │ │ │ │ │ └── dao │ │ │ │ │ └── PopulationDao.java │ │ │ └── resources │ │ │ │ └── application.yml │ │ │ └── test │ │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ └── springboot │ │ │ └── PopulationTest.java │ └── 
spring-mybatis-phoenix │ │ ├── pom.xml │ │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── heibaiying │ │ │ │ ├── bean │ │ │ │ └── USPopulation.java │ │ │ │ └── dao │ │ │ │ └── PopulationDao.java │ │ └── resources │ │ │ ├── jdbc.properties │ │ │ ├── mappers │ │ │ └── Population.xml │ │ │ ├── mybatisConfig.xml │ │ │ └── springApplication.xml │ │ └── test │ │ └── java │ │ └── com │ │ └── heibaiying │ │ └── dao │ │ └── PopulationDaoTest.java ├── Storm │ ├── storm-hbase-integration │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ ├── WordCountToHBaseApp.java │ │ │ └── component │ │ │ ├── CountBolt.java │ │ │ ├── DataSourceSpout.java │ │ │ └── SplitBolt.java │ ├── storm-hdfs-integration │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── com.heibaiying │ │ │ ├── DataToHdfsApp.java │ │ │ └── component │ │ │ └── DataSourceSpout.java │ ├── storm-kafka-integration │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ └── kafka │ │ │ ├── read │ │ │ ├── LogConsoleBolt.java │ │ │ └── ReadingFromKafkaApp.java │ │ │ └── write │ │ │ ├── DataSourceSpout.java │ │ │ └── WritingToKafkaApp.java │ ├── storm-redis-integration │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── com │ │ │ └── heibaiying │ │ │ ├── CustomRedisCountApp.java │ │ │ ├── WordCountToRedisApp.java │ │ │ └── component │ │ │ ├── CountBolt.java │ │ │ ├── DataSourceSpout.java │ │ │ ├── RedisCountStoreBolt.java │ │ │ ├── SplitBolt.java │ │ │ └── WordCountStoreMapper.java │ └── storm-word-count │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── heibaiying │ │ │ └── wordcount │ │ │ ├── ClusterWordCountApp.java │ │ │ ├── LocalWordCountApp.java │ │ │ └── component │ │ │ ├── CountBolt.java │ │ │ ├── DataSourceSpout.java │ │ │ └── SplitBolt.java │ │ └── resources │ │ └── assembly.xml ├── Zookeeper │ └── curator │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── heibaiying │ │ ├── AclOperation.java │ │ └── BasicOperation.java └── spark │ ├── spark-streaming-basis │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── heibaiying │ │ ├── NetworkWordCount.scala │ │ ├── NetworkWordCountToRedis.scala │ │ ├── NetworkWordCountV2.scala │ │ └── utils │ │ └── JedisPoolUtil.java │ ├── spark-streaming-flume │ ├── pom.xml │ └── src │ │ └── main │ │ └── scala │ │ └── com │ │ └── heibaiying │ │ └── flume │ │ ├── PullBasedWordCount.scala │ │ └── PushBasedWordCount.scala │ └── spark-streaming-kafka │ ├── pom.xml │ └── src │ └── main │ └── scala │ └── com │ └── heibaiying │ └── kafka │ └── KafkaDirectStream.scala ├── notes ├── Azkaban_Flow_1.0_的使用.md ├── Azkaban_Flow_2.0_的使用.md ├── Azkaban简介.md ├── Flink_Data_Sink.md ├── Flink_Data_Source.md ├── Flink_Data_Transformation.md ├── Flink_Windows.md ├── Flink开发环境搭建.md ├── Flink核心概念综述.md ├── Flink状态管理与检查点机制.md ├── Flume整合Kafka.md ├── Flume简介及基本使用.md ├── HDFS-Java-API.md ├── HDFS常用Shell命令.md ├── Hadoop-HDFS.md ├── Hadoop-MapReduce.md ├── Hadoop-YARN.md ├── Hbase_Java_API.md ├── Hbase_Shell.md ├── Hbase协处理器详解.md ├── Hbase容灾与备份.md ├── Hbase的SQL中间层_Phoenix.md ├── Hbase简介.md ├── Hbase系统架构及数据结构.md ├── Hbase过滤器详解.md ├── HiveCLI和Beeline命令行的基本使用.md ├── Hive分区表和分桶表.md ├── Hive常用DDL操作.md ├── Hive常用DML操作.md ├── Hive数据查询详解.md ├── Hive简介及核心概念.md ├── Hive视图和索引.md ├── Kafka消费者详解.md ├── Kafka深入理解分区副本机制.md ├── Kafka生产者详解.md ├── Kafka简介.md ├── Scala函数和闭包.md ├── Scala列表和集.md ├── Scala基本数据类型和运算符.md ├── Scala数组.md ├── Scala映射和元组.md ├── 
Scala模式匹配.md ├── Scala流程控制语句.md ├── Scala简介及开发环境配置.md ├── Scala类和对象.md ├── Scala类型参数.md ├── Scala继承和特质.md ├── Scala隐式转换和隐式参数.md ├── Scala集合类型.md ├── SparkSQL_Dataset和DataFrame简介.md ├── SparkSQL外部数据源.md ├── SparkSQL常用聚合函数.md ├── SparkSQL联结操作.md ├── Spark_RDD.md ├── Spark_Streaming与流处理.md ├── Spark_Streaming基本操作.md ├── Spark_Streaming整合Flume.md ├── Spark_Streaming整合Kafka.md ├── Spark_Structured_API的基本使用.md ├── Spark_Transformation和Action算子.md ├── Spark简介.md ├── Spark累加器与广播变量.md ├── Spark部署模式与作业提交.md ├── Spring+Mybtais+Phoenix整合.md ├── Sqoop基本使用.md ├── Sqoop简介与安装.md ├── Storm三种打包方式对比分析.md ├── Storm和流处理简介.md ├── Storm核心概念详解.md ├── Storm编程模型详解.md ├── Storm集成HBase和HDFS.md ├── Storm集成Kakfa.md ├── Storm集成Redis详解.md ├── Zookeeper_ACL权限控制.md ├── Zookeeper_Java客户端Curator.md ├── Zookeeper常用Shell命令.md ├── Zookeeper简介及核心概念.md ├── installation │ ├── Azkaban_3.x_编译及部署.md │ ├── Flink_Standalone_Cluster.md │ ├── HBase单机环境搭建.md │ ├── HBase集群环境搭建.md │ ├── Hadoop单机环境搭建.md │ ├── Hadoop集群环境搭建.md │ ├── Linux下Flume的安装.md │ ├── Linux下JDK安装.md │ ├── Linux下Python安装.md │ ├── Linux环境下Hive的安装部署.md │ ├── Spark开发环境搭建.md │ ├── Spark集群环境搭建.md │ ├── Storm单机环境搭建.md │ ├── Storm集群环境搭建.md │ ├── Zookeeper单机环境和集群环境搭建.md │ ├── 基于Zookeeper搭建Hadoop高可用集群.md │ ├── 基于Zookeeper搭建Kafka高可用集群.md │ └── 虚拟机静态IP及多IP配置.md ├── 大数据学习路线.md ├── 大数据常用软件安装指南.md ├── 大数据应用常用打包方式.md ├── 大数据技术栈思维导图.md └── 资料分享与工具推荐.md ├── pictures ├── 01_data_at_rest_infrastructure.png ├── 02_stream_processing_infrastructure.png ├── CustomRedisCountApp.png ├── Detailed-Hadoop-MapReduce-Data-Flow-14.png ├── Figure3Architecture-of-YARN.png ├── HADOOP-ECOSYSTEM-Edureka.png ├── HBaseArchitecture-Blog-Fig1.png ├── HBaseArchitecture-Blog-Fig2.png ├── HBaseArchitecture-Blog-Fig3.png ├── HBaseArchitecture-Blog-Fig4.png ├── HBaseArchitecture-Blog-Fig5.png ├── HBaseArchitecture-Blog-Fig6.png ├── HBaseArchitecture-Blog-Fig7.png ├── HBase_table-iteblog.png ├── HDFS-HA-Architecture-Edureka.png ├── HashMap-HashTable.png ├── Internal-Working-of-Apache-Storm.png ├── Phoenix-create-table.png ├── Phoenix-delete.png ├── Phoenix-hadoop.png ├── Phoenix-java-api-result.png ├── Phoenix-select.png ├── Phoenix-update.png ├── RegionObserver.png ├── RegionObservers-works.png ├── Stream groupings.png ├── WordCountToHBaseApp.png ├── akaban-jps.png ├── azkaban-click-edit.png ├── azkaban-create-project.png ├── azkaban-dependencies.png ├── azkaban-edit.png ├── azkaban-embeded-flow.png ├── azkaban-embeded-success.png ├── azkaban-execute.png ├── azkaban-flows.png ├── azkaban-gradle-wrapper-2.png ├── azkaban-gradle-wrapper.png ├── azkaban-hdfs.png ├── azkaban-hive-result.png ├── azkaban-hive.png ├── azkaban-log.png ├── azkaban-memory.png ├── azkaban-mr.png ├── azkaban-project-edit.png ├── azkaban-setting.png ├── azkaban-simle-result.png ├── azkaban-simple.png ├── azkaban-successed.png ├── azkaban-task-abcde-zip.png ├── azkaban-task-abcde.png ├── azkaban-upload.png ├── azkaban-web-ui.png ├── azkaban-web.png ├── azkaban-zip.png ├── azkaban.png ├── bigdata-notes-icon.png ├── bigdata-notes-icon.psd ├── blog-logo.png ├── curator-retry-policy.png ├── datasourcetohdfs.png ├── deprecated.png ├── flink-Rescaling.png ├── flink-RichParallelSourceFunction.png ├── flink-api-stack.png ├── flink-application-submission.png ├── flink-basis-project.png ├── flink-bounded-unbounded.png ├── flink-checkpoints-backend.png ├── flink-dashboard.png ├── flink-download.png ├── flink-kafka-datasource-console.png ├── flink-kafka-datasource-producer.png ├── flink-kafka-producer-consumer.png ├── flink-keyed-state.png ├── flink-lib.png 
├── flink-maven-new.png ├── flink-maven-profile.png ├── flink-maven.png ├── flink-mysql-sink.png ├── flink-non-windowed.png ├── flink-on-yarn-session.jpg ├── flink-operator-state-para1.png ├── flink-operator-state-para2.png ├── flink-operator-state.png ├── flink-optional-components.png ├── flink-process.png ├── flink-richsink.png ├── flink-scala-shell.png ├── flink-session-windows.png ├── flink-sliding-windows.png ├── flink-socket-wordcount-stdout.png ├── flink-socket-wordcount.png ├── flink-stack.png ├── flink-standalone-cluster-ha.png ├── flink-standalone-cluster-jps.png ├── flink-standalone-cluster.jpg ├── flink-start-cluster-shell.png ├── flink-state-management.png ├── flink-stateful-stream.png ├── flink-stream-barriers.png ├── flink-subtask-slots.png ├── flink-task-parallelism.png ├── flink-task-subtask.png ├── flink-tasks-slots.png ├── flink-tumbling-windows.png ├── flink-window-word-count.png ├── flink-word-count.png ├── flink-yarn-session.png ├── flink.png ├── flume-architecture.png ├── flume-consolidation.png ├── flume-example-1.png ├── flume-example-2.png ├── flume-example-3.png ├── flume-example-4.png ├── flume-example-7.png ├── flume-example-8.png ├── flume-example-9.png ├── flume-kafka-01.png ├── flume-kafka-2.png ├── flume-kafka.png ├── flume-multi-agent-flow.png ├── flume-multiplexing-the-flow.png ├── flume-retry.png ├── flume-version.png ├── flume.png ├── full-stack-notes.png ├── future-of-spark.png ├── hadoop-QJM-同步机制.png ├── hadoop-code-mapping.png ├── hadoop-code-partitation.png ├── hadoop-code-reducer.png ├── hadoop-combiner.png ├── hadoop-ha高可用集群架构.png ├── hadoop-namenode主备切换.png ├── hadoop-no-combiner.png ├── hadoop-rm-ha-overview.png ├── hadoop-wordcountapp.png ├── hadoop-wordcountcombinerpartition.png ├── hadoop-yarn安装验证.png ├── hadoop-集群搭建2.png ├── hadoop-集群搭建3.png ├── hadoop-集群环境搭建.png ├── hadoop.jpg ├── hadoop安装验证.png ├── hadoop集群规划.png ├── hadoop高可用集群1.png ├── hadoop高可用集群2.png ├── hadoop高可用集群3.png ├── hadoop高可用集群4.png ├── hadoop高可用集群5.png ├── hadoop高可用集群规划.png ├── hbase-60010.png ├── hbase-Region-Server.png ├── hbase-arc.png ├── hbase-bytearraycomparable.png ├── hbase-co-unload.png ├── hbase-compareFilter.png ├── hbase-connection.png ├── hbase-coprocessor.png ├── hbase-copy-table.png ├── hbase-cp-hdfs.png ├── hbase-cp-helloworld.png ├── hbase-cp-lisi.png ├── hbase-cp-load.png ├── hbase-filterbase-subclass.png ├── hbase-fliter.png ├── hbase-hadoop.png ├── hbase-region-dis.png ├── hbase-region-splite.png ├── hbase-unload-test.png ├── hbase-web-ui-phoenix.png ├── hbase-web-ui.png ├── hbase-webtable.png ├── hbase-集群搭建1.png ├── hbase-集群搭建2.png ├── hbase.jpg ├── hbase.png ├── hbase集群规划.png ├── hdfs-read-1.jpg ├── hdfs-tolerance-1.jpg ├── hdfs-tolerance-2.jpg ├── hdfs-tolerance-3.jpg ├── hdfs-tolerance-4.jpg ├── hdfs-tolerance-5.jpg ├── hdfs-write-1.jpg ├── hdfs-write-2.jpg ├── hdfs-write-3.jpg ├── hdfs-机架.png ├── hdfsarchitecture.png ├── hdfsdatanodes.png ├── hive-1-2-view.png ├── hive-beeline-cli.png ├── hive-beeline.png ├── hive-data-type.png ├── hive-e.png ├── hive-emp-deptno-20-30.png ├── hive-emp-deptno-20.png ├── hive-emp-ptn.png ├── hive-emp-ts-2.png ├── hive-emp-ts.png ├── hive-emp.png ├── hive-external-table.png ├── hive-hadoop-bucket.png ├── hive-hadoop-mapreducer.png ├── hive-hadoop-partitation.png ├── hive-index-show.png ├── hive-index-table.png ├── hive-install-2.png ├── hive-mysql-tables.png ├── hive-mysql.png ├── hive-n-j.png ├── hive-order-by.png ├── hive-ouput.png ├── hive-right-join.png ├── hive-select-emp.png ├── hive-show-database.png ├── 
hive-view-properties.png ├── hive.jpg ├── hive体系架构.png ├── idea-newproject-scala.png ├── idea-scala-2.1.8.png ├── idea-scala-change.png ├── idea-scala-plugin.png ├── idea-scala-select.png ├── ifconfig.png ├── ipconfig.png ├── jar-with-dependencies.png ├── kafka-BIO.png ├── kafka-cluster-shell.png ├── kafka-cluster.png ├── kafka-compress-message.png ├── kafka-consumer01.png ├── kafka-consumer02.png ├── kafka-producer-consumer.png ├── kafka-send-messgaes.png ├── kafka-simple-producer.png ├── kafka-topic.png ├── kafka-元数据请求.png ├── kafka-分区副本.png ├── kafka-数据可见性.png ├── kafka-零拷贝.png ├── kafka.png ├── kafka消费者.png ├── mapreduce-combiner.png ├── mapreduce-sort.png ├── mapreduce-with-combiners.png ├── mapreduce-without-combiners.png ├── mapreduceProcess.png ├── mutli-net-ip.png ├── oozie.jpg ├── phoenix-core-jar.png ├── phoenix-shell.png ├── readfromkafka.png ├── relationships-worker-processes-executors-tasks.png ├── scala-collection-imm.png ├── scala-collection-m.png ├── scala-collection.png ├── scala-hello-world.png ├── scala-int+.png ├── scala-ordered-ordering.png ├── scala-other-resources.png ├── scala-plugin.png ├── scala-richInt.png ├── scala-sdk.png ├── scala-select.png ├── scala-shell.png ├── scala-分区数.png ├── scala-操作符优先级.png ├── scala-视图界定.png ├── scala.jpg ├── scala带有特质的对象.png ├── scala继承层次.png ├── spark-Big-table–to–big-table.png ├── spark-Big-table–to–small-table.png ├── spark-DAG.png ├── spark-Logical-Planning.png ├── spark-Physical-Planning.png ├── spark-Standalone-web-ui.png ├── spark-aggregateByKey.png ├── spark-dataFrame+RDDs.png ├── spark-download.png ├── spark-flume-console.png ├── spark-flume-input.png ├── spark-getpartnum.png ├── spark-mysql-分区上下限.png ├── spark-pi.png ├── spark-reducebykey.png ├── spark-scheme.png ├── spark-shell-local.png ├── spark-shell-web-ui.png ├── spark-shell.png ├── spark-sql-NATURAL-JOIN.png ├── spark-sql-shell.png ├── spark-sql-自定义函数.png ├── spark-stack.png ├── spark-straming-kafka-console.png ├── spark-streaming-arch.png ├── spark-streaming-dstream-ops.png ├── spark-streaming-flow.png ├── spark-streaming-flume-jar.png ├── spark-streaming-word-count-v1.png ├── spark-streaming-word-count-v2.png ├── spark-streaming-word-count-v3.png ├── spark-structure-api.png ├── spark-unifed.png ├── spark-web-ui.png ├── spark-内存不足.png ├── spark-内存不足2.png ├── spark-分区.png ├── spark-窄依赖和宽依赖.png ├── spark-累加器1.png ├── spark-累加器2.png ├── spark-累加器方法.png ├── spark-运行安全.png ├── spark-运行时类型安全.png ├── spark-集群搭建1.png ├── spark-集群搭建2.png ├── spark-集群搭建3.png ├── spark-集群搭建4.png ├── spark-集群搭建5.png ├── spark-集群模式.png ├── spark.jpg ├── spark集群规划.png ├── spout-bolt.png ├── spring-boot-mybatis-phoenix.png ├── spring-mybatis-phoenix.png ├── sql-hive-arch.png ├── sql-join.jpg ├── sqoop-help.png ├── sqoop-hive-hdfs.png ├── sqoop-hive-location.png ├── sqoop-list-databases.png ├── sqoop-map-task.png ├── sqoop-mysql-connect.png ├── sqoop-mysql-jar.png ├── sqoop-tool.png ├── sqoop-version-selected.png ├── sqoop-version.png ├── sqoop.png ├── sqoop_hbase.png ├── sqoop_hdfs_ls.png ├── sqoop_hive_error.png ├── sqoop_hive_success.png ├── sqoop_hive_table.png ├── sqoop_hive_tables.png ├── store-redis-manager.png ├── storm-Redis-Mapper.png ├── storm-abstractRedisBolt.png ├── storm-baseRichSpout.png ├── storm-baseRichbolt.png ├── storm-bolts.png ├── storm-flow.png ├── storm-hbase-result.png ├── storm-hdfs-result.png ├── storm-jar-complie-error.png ├── storm-jar.png ├── storm-jar2.png ├── storm-jedicCommands.png ├── storm-kafka-producer.png ├── storm-kafka-receiver.png ├── storm-lib.png ├── 
storm-list-kill.png ├── storm-package-error.png ├── storm-spouts.png ├── storm-streams.png ├── storm-submit-success.png ├── storm-topology.png ├── storm-tuples.png ├── storm-ui-actions.png ├── storm-web-ui.png ├── storm-word-count-console.png ├── storm-word-count-p.png ├── storm-wordcounttoredis.png ├── storm-集群-shell.png ├── storm-集群搭建1.png ├── storm-集群规划.png ├── storm.png ├── storm集群.png ├── storm集群搭建2.png ├── streaming-flow.png ├── strom-kafka-consumer.png ├── topology-tasks.png ├── virtualbox-multi-network.png ├── virtualbox启用网络.png ├── weixin-desc.png ├── weixin-normal.png ├── weixin.jpg ├── writetokafka.png ├── yarn-base.png ├── yarn工作原理.png ├── yarn工作原理简图.png ├── zookeeper-brocast.jpg ├── zookeeper-cluster.png ├── zookeeper-hadoop001.png ├── zookeeper-hadoop002.png ├── zookeeper-hadoop003.png ├── zookeeper-super.png ├── zookeeper-zkcomponents.jpg ├── zookeeper-zknamespace.jpg ├── zookeeper-zkservice.jpg ├── zookeeper.jpg ├── 大数据处理简化流程.png ├── 大数据技术栈思维导图.png └── 大数据技术栈思维导图.xmind └── resources ├── csv └── dept.csv ├── json ├── dept.json └── emp.json ├── mysql-connector-java-5.1.47.jar ├── orc └── dept.orc ├── parquet ├── dept.parquet └── emp.parquet ├── tsv ├── dept.tsv └── emp.tsv └── txt ├── dept.txt └── emp.txt /.gitignore: -------------------------------------------------------------------------------- 1 | *# 2 | *.iml 3 | *.ipr 4 | *.iws 5 | *.sw? 6 | *~ 7 | .#* 8 | .*.md.html 9 | .DS_Store 10 | .classpath 11 | .factorypath 12 | .gradle 13 | .idea 14 | .metadata 15 | .project 16 | .recommenders 17 | .settings 18 | .springBeans 19 | /build 20 | MANIFEST.MF 21 | _site/ 22 | activemq-data 23 | bin 24 | build 25 | build.log 26 | dependency-reduced-pom.xml 27 | dump.rdb 28 | interpolated*.xml 29 | lib/ 30 | manifest.yml 31 | overridedb.* 32 | settings.xml 33 | target 34 | classes 35 | out 36 | logs 37 | transaction-logs 38 | .flattened-pom.xml 39 | secrets.yml 40 | .gradletasknamecache 41 | .sts4-cache -------------------------------------------------------------------------------- /code/Flink/flink-basis-java/src/main/java/com/heibaiying/StreamingJob.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying; 2 | 3 | import org.apache.flink.api.java.operators.DataSource; 4 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 6 | 7 | public class StreamingJob { 8 | 9 | private static final String ROOT_PATH = "D:\\BigData-Notes\\code\\Flink\\flink-basis-java\\src\\main\\resources\\"; 10 | 11 | public static void main(String[] args) throws Exception { 12 | 13 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 14 | DataStreamSource streamSource = env.readTextFile(ROOT_PATH + "log4j.properties"); 15 | streamSource.writeAsText(ROOT_PATH + "out").setParallelism(1); 16 | env.execute(); 17 | 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /code/Flink/flink-basis-java/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. 
The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | ################################################################################ 18 | 19 | log4j.rootLogger=INFO, console 20 | 21 | log4j.appender.console=org.apache.log4j.ConsoleAppender 22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 23 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n 24 | -------------------------------------------------------------------------------- /code/Flink/flink-basis-scala/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | ################################################################################ 18 | 19 | log4j.rootLogger=INFO, console 20 | 21 | log4j.appender.console=org.apache.log4j.ConsoleAppender 22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 23 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n 24 | -------------------------------------------------------------------------------- /code/Flink/flink-basis-scala/src/main/resources/wordcount.txt: -------------------------------------------------------------------------------- 1 | a,a,a,a,a 2 | b,b,b 3 | c,c 4 | d,d 5 | -------------------------------------------------------------------------------- /code/Flink/flink-basis-scala/src/main/scala/com/heibaiying/WordCountBatch.scala: -------------------------------------------------------------------------------- 1 | package com.heibaiying 2 | 3 | import org.apache.flink.api.scala._ 4 | 5 | object WordCountBatch { 6 | 7 | def main(args: Array[String]): Unit = { 8 | val benv = ExecutionEnvironment.getExecutionEnvironment 9 | val dataSet = benv.readTextFile("D:\\BigData-Notes\\code\\Flink\\flink-basis-scala\\src\\main\\resources\\wordcount.txt") 10 | dataSet.flatMap { _.toLowerCase.split(",")} 11 | .filter (_.nonEmpty) 12 | .map { (_, 1) } 13 | .groupBy(0) 14 | .sum(1) 15 | .print() 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /code/Flink/flink-basis-scala/src/main/scala/com/heibaiying/WordCountStreaming.scala: -------------------------------------------------------------------------------- 1 | package com.heibaiying 2 | 3 | import org.apache.flink.streaming.api.scala._ 4 | import org.apache.flink.streaming.api.windowing.time.Time 5 | 6 | 7 | object WordCountStreaming { 8 | 9 | def main(args: Array[String]): Unit = { 10 | 11 | val senv = StreamExecutionEnvironment.getExecutionEnvironment 12 | 13 | val dataStream: DataStream[String] = senv.socketTextStream("192.168.0.229", 9999, '\n') 14 | dataStream.flatMap { line => line.toLowerCase.split(",") } 15 | .filter(_.nonEmpty) 16 | .map { word => (word, 1) } 17 | .keyBy(0) 18 | .timeWindow(Time.seconds(3)) 19 | .sum(1) 20 | .print() 21 | senv.execute("Streaming WordCount") 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /code/Flink/flink-kafka-integration/src/main/java/com/heibaiying/CustomSinkJob.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying; 2 | 3 | import com.heibaiying.bean.Employee; 4 | import com.heibaiying.sink.FlinkToMySQLSink; 5 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 6 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 7 | 8 | import java.sql.Date; 9 | 10 | public class CustomSinkJob { 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 15 | Date date = new Date(System.currentTimeMillis()); 16 | DataStreamSource streamSource = env.fromElements( 17 | new Employee("hei", 10, date), 18 | new Employee("bai", 20, date), 19 | new Employee("ying", 30, date)); 20 | streamSource.addSink(new FlinkToMySQLSink()); 21 | env.execute(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /code/Flink/flink-kafka-integration/src/main/java/com/heibaiying/bean/Employee.java: 
-------------------------------------------------------------------------------- 1 | package com.heibaiying.bean; 2 | 3 | import java.sql.Date; 4 | 5 | public class Employee { 6 | 7 | private String name; 8 | private int age; 9 | private Date birthday; 10 | 11 | Employee(){} 12 | 13 | public Employee(String name, int age, Date birthday) { 14 | this.name = name; 15 | this.age = age; 16 | this.birthday = birthday; 17 | } 18 | 19 | public String getName() { 20 | return name; 21 | } 22 | 23 | public void setName(String name) { 24 | this.name = name; 25 | } 26 | 27 | public int getAge() { 28 | return age; 29 | } 30 | 31 | public void setAge(int age) { 32 | this.age = age; 33 | } 34 | 35 | public Date getBirthday() { 36 | return birthday; 37 | } 38 | 39 | public void setBirthday(Date birthday) { 40 | this.birthday = birthday; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /code/Flink/flink-kafka-integration/src/main/java/com/heibaiying/sink/FlinkToMySQLSink.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying.sink; 2 | 3 | import com.heibaiying.bean.Employee; 4 | import org.apache.flink.configuration.Configuration; 5 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 6 | 7 | import java.sql.Connection; 8 | import java.sql.DriverManager; 9 | import java.sql.PreparedStatement; 10 | 11 | public class FlinkToMySQLSink extends RichSinkFunction { 12 | 13 | private PreparedStatement stmt; 14 | private Connection conn; 15 | 16 | @Override 17 | public void open(Configuration parameters) throws Exception { 18 | Class.forName("com.mysql.cj.jdbc.Driver"); 19 | conn = DriverManager.getConnection("jdbc:mysql://192.168.0.229:3306/employees?characterEncoding=UTF-8&serverTimezone=UTC&useSSL=false", "root", "123456"); 20 | String sql = "insert into emp(name, age, birthday) values(?, ?, ?)"; 21 | stmt = conn.prepareStatement(sql); 22 | } 23 | 24 | @Override 25 | public void invoke(Employee value, Context context) throws Exception { 26 | stmt.setString(1, value.getName()); 27 | stmt.setInt(2, value.getAge()); 28 | stmt.setDate(3, value.getBirthday()); 29 | stmt.executeUpdate(); 30 | } 31 | 32 | @Override 33 | public void close() throws Exception { 34 | super.close(); 35 | if (stmt != null) { 36 | stmt.close(); 37 | } 38 | if (conn != null) { 39 | conn.close(); 40 | } 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /code/Flink/flink-kafka-integration/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 | 
19 | log4j.rootLogger=INFO, console
20 | 
21 | log4j.appender.console=org.apache.log4j.ConsoleAppender
22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout
23 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
24 | 
--------------------------------------------------------------------------------
/code/Flink/flink-state-management/src/main/java/com/heibaiying/keyedstate/KeyedStateJob.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.keyedstate;
2 | 
3 | import org.apache.flink.api.java.tuple.Tuple2;
4 | import org.apache.flink.streaming.api.datastream.DataStreamSource;
5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
6 | 
7 | 
8 | public class KeyedStateJob {
9 | 
10 |     public static void main(String[] args) throws Exception {
11 |         final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
12 |         DataStreamSource<Tuple2<String, Long>> tuple2DataStreamSource = env.fromElements(
13 |                 Tuple2.of("a", 50L), Tuple2.of("a", 80L), Tuple2.of("a", 400L),
14 |                 Tuple2.of("a", 100L), Tuple2.of("a", 200L), Tuple2.of("a", 200L),
15 |                 Tuple2.of("b", 100L), Tuple2.of("b", 200L), Tuple2.of("b", 200L),
16 |                 Tuple2.of("b", 500L), Tuple2.of("b", 600L), Tuple2.of("b", 700L));
17 |         tuple2DataStreamSource
18 |                 .keyBy(0)
19 |                 .flatMap(new ThresholdWarning(100L, 3))
20 |                 .printToErr();
21 |         env.execute("Managed Keyed State");
22 |     }
23 | 
24 | }
25 | 
--------------------------------------------------------------------------------
/code/Flink/flink-state-management/src/main/java/com/heibaiying/keyedstate/ThresholdWarning.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.keyedstate;
2 | 
3 | import org.apache.flink.api.common.functions.RichFlatMapFunction;
4 | import org.apache.flink.api.common.state.ListState;
5 | import org.apache.flink.api.common.state.ListStateDescriptor;
6 | import org.apache.flink.api.java.tuple.Tuple2;
7 | import org.apache.flink.configuration.Configuration;
8 | import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
9 | import org.apache.flink.util.Collector;
10 | 
11 | import java.util.ArrayList;
12 | import java.util.List;
13 | 
14 | public class ThresholdWarning extends RichFlatMapFunction<Tuple2<String, Long>, Tuple2<String, List<Long>>> {
15 | 
16 |     // ListState that buffers the abnormal readings
17 |     private transient ListState<Long> abnormalData;
18 |     // the threshold to monitor
19 |     private Long threshold;
20 |     // how many threshold breaches trigger an alert
21 |     private Integer numberOfTimes;
22 | 
23 |     ThresholdWarning(Long threshold, Integer numberOfTimes) {
24 |         this.threshold = threshold;
25 |         this.numberOfTimes = numberOfTimes;
26 |     }
27 | 
28 |     @Override
29 |     public void open(Configuration parameters) {
30 |         // look up the state instance by name (handle); it is created automatically if it does not exist yet
31 |         abnormalData = getRuntimeContext().getListState(new ListStateDescriptor<>("abnormalData", Long.class));
32 |     }
33 | 
34 |     @Override
35 |     public void flatMap(Tuple2<String, Long> value, Collector<Tuple2<String, List<Long>>> out) throws Exception {
36 |         Long inputValue = value.f1;
37 |         // if the input exceeds the threshold, record the abnormal reading
38 |         if (inputValue >= threshold) {
39 |             abnormalData.add(inputValue);
40 |         }
41 |         ArrayList<Long> list = Lists.newArrayList(abnormalData.get().iterator());
42 |         // once abnormal readings have occurred often enough, emit an alert
43 |         if (list.size() >= numberOfTimes) {
44 |             out.collect(Tuple2.of(value.f0 + " exceeded the given threshold ", list));
45 |             // clear the buffered state after the alert has been emitted
46 |             abnormalData.clear();
47 |         }
48 |     }
49 | }
50 | 
--------------------------------------------------------------------------------
/code/Flink/flink-state-management/src/main/java/com/heibaiying/operatorstate/OperatorStateJob.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.operatorstate;
2 | 
3 | import org.apache.flink.api.java.tuple.Tuple2;
4 | import org.apache.flink.streaming.api.datastream.DataStreamSource;
5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
6 | 
7 | 
8 | public class OperatorStateJob {
9 | 
10 |     public static void main(String[] args) throws Exception {
11 |         final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
12 |         // enable checkpointing
13 |         env.enableCheckpointing(1000);
14 |         // set the parallelism to 1
15 |         DataStreamSource<Tuple2<String, Long>> tuple2DataStreamSource = env.setParallelism(1).fromElements(
16 |                 Tuple2.of("a", 50L), Tuple2.of("a", 80L), Tuple2.of("a", 400L),
17 |                 Tuple2.of("a", 100L), Tuple2.of("a", 200L), Tuple2.of("a", 200L),
18 |                 Tuple2.of("b", 100L), Tuple2.of("b", 200L), Tuple2.of("b", 200L),
19 |                 Tuple2.of("b", 500L), Tuple2.of("b", 600L), Tuple2.of("b", 700L));
20 |         tuple2DataStreamSource
21 |                 .flatMap(new ThresholdWarning(100L, 3))
22 |                 .printToErr();
23 |         env.execute("Managed Operator State");
24 |     }
25 | 
26 | }
27 | 
--------------------------------------------------------------------------------
/code/Flink/flink-state-management/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Licensed to the Apache Software Foundation (ASF) under one
3 | # or more contributor license agreements.  See the NOTICE file
4 | # distributed with this work for additional information
5 | # regarding copyright ownership.  The ASF licenses this file
6 | # to you under the Apache License, Version 2.0 (the
7 | # "License"); you may not use this file except in compliance
8 | # with the License.  You may obtain a copy of the License at
9 | #
10 | #     http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 | 
19 | log4j.rootLogger=INFO, console
20 | 
21 | log4j.appender.console=org.apache.log4j.ConsoleAppender
22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout
23 | log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
24 | 
--------------------------------------------------------------------------------
/code/Hadoop/hadoop-word-count/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>hadoop-word-count</artifactId>
9 |     <version>1.0</version>
10 | 
11 |     <build>
12 |         <plugins>
13 |             <plugin>
14 |                 <groupId>org.apache.maven.plugins</groupId>
15 |                 <artifactId>maven-compiler-plugin</artifactId>
16 |                 <configuration>
17 |                     <source>8</source>
18 |                     <target>8</target>
19 |                 </configuration>
20 |             </plugin>
21 |         </plugins>
22 |     </build>
23 | 
24 |     <properties>
25 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
26 |         <hadoop.version>2.6.0-cdh5.15.2</hadoop.version>
27 |     </properties>
28 | 
29 |     <repositories>
30 |         <repository>
31 |             <id>cloudera</id>
32 |             <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
33 |         </repository>
34 |     </repositories>
35 | 
36 |     <dependencies>
37 |         <dependency>
38 |             <groupId>org.apache.hadoop</groupId>
39 |             <artifactId>hadoop-client</artifactId>
40 |             <version>${hadoop.version}</version>
41 |         </dependency>
42 |         <dependency>
43 |             <groupId>org.apache.commons</groupId>
44 |             <artifactId>commons-lang3</artifactId>
45 |             <version>3.8.1</version>
46 |         </dependency>
47 |     </dependencies>
48 | </project>
--------------------------------------------------------------------------------
/code/Hadoop/hadoop-word-count/src/main/java/com/heibaiying/component/CustomPartitioner.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.component;
2 | 
3 | import com.heibaiying.utils.WordCountDataUtils;
4 | import org.apache.hadoop.io.IntWritable;
5 | import org.apache.hadoop.io.Text;
6 | import org.apache.hadoop.mapreduce.Partitioner;
7 | 
8 | /**
9 |  * Custom partitioner: partition the output by word
10 |  */
11 | public class CustomPartitioner extends Partitioner<Text, IntWritable> {
12 | 
13 |     public int getPartition(Text text, IntWritable intWritable, int numPartitions) {
14 |         return WordCountDataUtils.WORD_LIST.indexOf(text.toString());
15 |     }
16 | }
17 | 
--------------------------------------------------------------------------------
/code/Hadoop/hadoop-word-count/src/main/java/com/heibaiying/component/WordCountMapper.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.component;
2 | 
3 | import org.apache.hadoop.io.IntWritable;
4 | import org.apache.hadoop.io.LongWritable;
5 | import org.apache.hadoop.io.Text;
6 | import org.apache.hadoop.mapreduce.Mapper;
7 | 
8 | import java.io.IOException;
9 | 
10 | /**
11 |  * Split each line of input on the given delimiter
12 |  */
13 | public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
14 | 
15 |     @Override
16 |     protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
17 |         String[] words = value.toString().split("\t");
18 |         for (String word : words) {
19 |             context.write(new Text(word), new IntWritable(1));
20 |         }
21 |     }
22 | 
23 | }
24 | 
--------------------------------------------------------------------------------
/code/Hadoop/hadoop-word-count/src/main/java/com/heibaiying/component/WordCountReducer.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.component;
2 | 
3 | import org.apache.hadoop.io.IntWritable;
4 | import org.apache.hadoop.io.Text;
5 | import org.apache.hadoop.mapreduce.Reducer;
6 | 
7 | import java.io.IOException;
8 | 
9 | /**
10 |  * Count the frequency of each word
11 |  */
12 | public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
13 | 
14 |     @Override
15 |     protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
16 |         int count = 0;
17 |         for (IntWritable value : values) {
18 |             count += value.get();
19 |         }
20 |         context.write(key, new IntWritable(count));
21 |     }
22 | }
--------------------------------------------------------------------------------
/code/Hadoop/hadoop-word-count/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=INFO,CONSOLE
2 | log4j.additivity.org.apache=false
3 | 
4 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
5 | log4j.appender.CONSOLE.Threshold=INFO
6 | log4j.appender.CONSOLE.layout.ConversionPattern=%d{yyyy-MM-dd HH\:mm\:ss} -%-4r [%t] %-5p %x - %m%n
7 | log4j.appender.CONSOLE.Target=System.out
8 | log4j.appender.CONSOLE.Encoding=UTF-8
9 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
10 | 
--------------------------------------------------------------------------------
/code/Hadoop/hdfs-java-api/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>hdfs-java-api</artifactId>
9 |     <version>1.0</version>
10 | 
11 |     <properties>
12 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
13 |         <hadoop.version>2.6.0-cdh5.15.2</hadoop.version>
14 |     </properties>
15 | 
16 |     <repositories>
17 |         <repository>
18 |             <id>cloudera</id>
19 |             <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
20 |         </repository>
21 |     </repositories>
22 | 
23 |     <dependencies>
24 |         <dependency>
25 |             <groupId>org.apache.hadoop</groupId>
26 |             <artifactId>hadoop-client</artifactId>
27 |             <version>${hadoop.version}</version>
28 |         </dependency>
29 |         <dependency>
30 |             <groupId>junit</groupId>
31 |             <artifactId>junit</artifactId>
32 |             <version>4.12</version>
33 |             <scope>test</scope>
34 |         </dependency>
35 |     </dependencies>
36 | </project>
--------------------------------------------------------------------------------
/code/Hbase/hbase-java-api-1.x/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>hbase-java-api-1.x</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 | 
11 |     <build>
12 |         <plugins>
13 |             <plugin>
14 |                 <groupId>org.apache.maven.plugins</groupId>
15 |                 <artifactId>maven-compiler-plugin</artifactId>
16 |                 <configuration>
17 |                     <source>8</source>
18 |                     <target>8</target>
19 |                 </configuration>
20 |             </plugin>
21 |         </plugins>
22 |     </build>
23 | 
24 |     <dependencies>
25 |         <dependency>
26 |             <groupId>org.apache.hbase</groupId>
27 |             <artifactId>hbase-client</artifactId>
28 |             <version>1.2.0</version>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>junit</groupId>
32 |             <artifactId>junit</artifactId>
33 |             <version>4.12</version>
34 |             <scope>test</scope>
35 |         </dependency>
36 |     </dependencies>
37 | </project>
--------------------------------------------------------------------------------
/code/Hbase/hbase-java-api-2.x/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>hbase-java-api-2.x</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 | 
11 |     <build>
12 |         <plugins>
13 |             <plugin>
14 |                 <groupId>org.apache.maven.plugins</groupId>
15 |                 <artifactId>maven-compiler-plugin</artifactId>
16 |                 <configuration>
17 |                     <source>8</source>
18 |                     <target>8</target>
19 |                 </configuration>
20 |             </plugin>
21 |         </plugins>
22 |     </build>
23 | 
24 |     <dependencies>
25 |         <dependency>
26 |             <groupId>org.apache.hbase</groupId>
27 |             <artifactId>hbase-client</artifactId>
28 |             <version>2.1.4</version>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>junit</groupId>
32 |             <artifactId>junit</artifactId>
33 |             <version>4.12</version>
34 |             <scope>test</scope>
35 |         </dependency>
36 |     </dependencies>
37 | </project>
--------------------------------------------------------------------------------
/code/Hbase/hbase-observer-coprocessor/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>hbase-observer-coprocessor</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 | 
11 |     <dependencies>
12 |         <dependency>
13 |             <groupId>org.apache.hbase</groupId>
14 |             <artifactId>hbase-common</artifactId>
15 |             <version>1.2.0</version>
16 |         </dependency>
17 |         <dependency>
18 |             <groupId>org.apache.hbase</groupId>
19 |             <artifactId>hbase-server</artifactId>
20 |             <version>1.2.0</version>
21 |         </dependency>
22 |     </dependencies>
23 | </project>
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <groupId>com.heibaiying</groupId>
8 |     <artifactId>kafka-basis</artifactId>
9 |     <version>1.0</version>
10 | 
11 |     <build>
12 |         <plugins>
13 |             <plugin>
14 |                 <groupId>org.apache.maven.plugins</groupId>
15 |                 <artifactId>maven-compiler-plugin</artifactId>
16 |                 <configuration>
17 |                     <source>8</source>
18 |                     <target>8</target>
19 |                 </configuration>
20 |             </plugin>
21 |         </plugins>
22 |     </build>
23 | 
24 |     <dependencies>
25 |         <dependency>
26 |             <groupId>org.apache.kafka</groupId>
27 |             <artifactId>kafka-clients</artifactId>
28 |             <version>2.2.0</version>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.apache.kafka</groupId>
32 |             <artifactId>kafka_2.12</artifactId>
33 |             <version>2.2.0</version>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.slf4j</groupId>
37 |             <artifactId>slf4j-nop</artifactId>
38 |             <version>1.7.25</version>
39 |         </dependency>
40 |     </dependencies>
41 | </project>
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/consumers/ConsumerASynAndSyn.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.consumers;
2 | 
3 | import org.apache.kafka.clients.consumer.ConsumerRecord;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.KafkaConsumer;
6 | 
7 | import java.time.Duration;
8 | import java.time.temporal.ChronoUnit;
9 | import java.util.Collections;
10 | import java.util.Properties;
11 | 
12 | /**
13 |  * Kafka consumer — asynchronous commits with a final synchronous commit
14 |  */
15 | public class ConsumerASynAndSyn {
16 | 
17 |     public static void main(String[] args) {
18 |         String topic = "Hello-Kafka";
19 |         String group = "group1";
20 |         Properties props = new Properties();
21 |         props.put("bootstrap.servers", "hadoop001:9092");
22 |         props.put("group.id", group);
23 |         props.put("enable.auto.commit", false);
24 |         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
25 |         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
26 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
27 | 
28 |         consumer.subscribe(Collections.singletonList(topic));
29 | 
30 |         try {
31 |             while (true) {
32 |                 ConsumerRecords<String, String> records = consumer.poll(Duration.of(100, ChronoUnit.MILLIS));
33 |                 for (ConsumerRecord<String, String> record : records) {
34 |                     System.out.println(record);
35 |                 }
36 |                 // asynchronous commit
37 |                 consumer.commitAsync();
38 |             }
39 |         } catch (Exception e) {
40 |             e.printStackTrace();
41 |         } finally {
42 |             try {
43 |                 // the consumer is about to be closed, so commit synchronously to make sure the final offsets are saved
44 |                 consumer.commitSync();
45 |             } finally {
46 |                 consumer.close();
47 |             }
48 |         }
49 |     }
50 | }
51 | 
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/consumers/ConsumerGroup.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.consumers;
2 | 
3 | import org.apache.kafka.clients.consumer.ConsumerRecord;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.KafkaConsumer;
6 | 
7 | import java.time.Duration;
8 | import java.time.temporal.ChronoUnit;
9 | import java.util.Collections;
10 | import java.util.Properties;
11 | 
12 | 
13 | /**
14 |  * Kafka consumer and consumer group
15 |  */
16 | public class ConsumerGroup {
17 | 
18 |     public static void main(String[] args) {
19 |         String topic = "Hello-Kafka";
20 |         String group = "group1";
21 |         Properties props = new Properties();
22 |         props.put("bootstrap.servers", "hadoop001:9092");
23 |         /* specify the consumer group id */
24 |         props.put("group.id", group);
25 |         props.put("enable.auto.commit", true);
26 |         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
27 |         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
28 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
29 | 
30 |         /* subscribe to the topic(s) */
31 |         consumer.subscribe(Collections.singletonList(topic));
32 | 
33 |         try {
34 |             while (true) {
35 |                 /* poll for records */
36 |                 ConsumerRecords<String, String> records = consumer.poll(Duration.of(100, ChronoUnit.MILLIS));
37 |                 for (ConsumerRecord<String, String> record : records) {
38 |                     System.out.printf("topic = %s,partition = %d, key = %s, value = %s, offset = %d,\n",
39 |                             record.topic(), record.partition(), record.key(), record.value(), record.offset());
40 |                 }
41 |             }
42 |         } finally {
43 |             consumer.close();
44 |         }
45 | 
46 |     }
47 | }
48 | 
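A hedged aside, not a file from this repository: the commit calls above always commit whatever poll() last returned. When finer-grained control is wanted, an explicit per-partition offset map can be handed to commitAsync. The sketch below assumes the same broker address, topic, and group as the surrounding examples; the class name ConsumerCommitSpecificOffsets is made up for illustration.

package com.heibaiying.consumers;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class ConsumerCommitSpecificOffsets {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hadoop001:9092");
        props.put("group.id", "group1");
        props.put("enable.auto.commit", false);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("Hello-Kafka"));

        Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    // remember the position *after* this record, per partition
                    offsets.put(new TopicPartition(record.topic(), record.partition()),
                            new OffsetAndMetadata(record.offset() + 1));
                }
                // commit the explicit offset map instead of "whatever was last polled"
                consumer.commitAsync(offsets, null);
            }
        } finally {
            consumer.close();
        }
    }
}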
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/consumers/ConsumerSyn.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.consumers;
2 | 
3 | import org.apache.kafka.clients.consumer.ConsumerRecord;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.KafkaConsumer;
6 | 
7 | import java.time.Duration;
8 | import java.time.temporal.ChronoUnit;
9 | import java.util.Collections;
10 | import java.util.Properties;
11 | 
12 | /**
13 |  * Kafka consumer — synchronous offset commit
14 |  */
15 | public class ConsumerSyn {
16 | 
17 |     public static void main(String[] args) {
18 |         String topic = "Hello-Kafka";
19 |         String group = "group1";
20 |         Properties props = new Properties();
21 |         props.put("bootstrap.servers", "hadoop001:9092");
22 |         props.put("group.id", group);
23 |         props.put("enable.auto.commit", false);
24 |         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
25 |         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
26 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
27 | 
28 |         consumer.subscribe(Collections.singletonList(topic));
29 | 
30 |         try {
31 |             while (true) {
32 |                 ConsumerRecords<String, String> records = consumer.poll(Duration.of(100, ChronoUnit.MILLIS));
33 |                 for (ConsumerRecord<String, String> record : records) {
34 |                     System.out.println(record);
35 |                 }
36 |                 /* synchronous commit */
37 |                 consumer.commitSync();
38 |             }
39 |         } finally {
40 |             consumer.close();
41 |         }
42 | 
43 |     }
44 | }
45 | 
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/producers/ProducerASyn.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.producers;
2 | 
3 | import org.apache.kafka.clients.producer.*;
4 | 
5 | import java.util.Properties;
6 | 
7 | /*
8 |  * Kafka producer example — sending messages asynchronously
9 |  */
10 | public class ProducerASyn {
11 | 
12 |     public static void main(String[] args) {
13 | 
14 |         String topicName = "Hello-Kafka";
15 | 
16 |         Properties props = new Properties();
17 |         props.put("bootstrap.servers", "hadoop001:9092");
18 |         props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
19 |         props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
20 |         /* create the producer */
21 |         Producer<String, String> producer = new KafkaProducer<>(props);
22 | 
23 |         for (int i = 0; i < 10; i++) {
24 |             ProducerRecord<String, String> record = new ProducerRecord<>(topicName, "k" + i, "world" + i);
25 |             /* send the message asynchronously and register a callback */
26 |             producer.send(record, new Callback() {
27 |                 @Override
28 |                 public void onCompletion(RecordMetadata metadata, Exception exception) {
29 |                     if (exception != null) {
30 |                         System.out.println("handle the exception here");
31 |                     } else {
32 |                         System.out.printf("topic=%s, partition=%d, offset=%s \n",
33 |                                 metadata.topic(), metadata.partition(), metadata.offset());
34 |                     }
35 |                 }
36 |             });
37 |         }
38 | 
39 |         /* close the producer */
40 |         producer.close();
41 |     }
42 | }
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/producers/ProducerSyn.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.producers;
2 | 
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.Producer;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | import org.apache.kafka.clients.producer.RecordMetadata;
7 | 
8 | import java.util.Properties;
9 | import java.util.concurrent.ExecutionException;
10 | 
11 | /*
12 |  * Kafka producer example — sending messages synchronously
13 |  */
14 | public class ProducerSyn {
15 | 
16 |     public static void main(String[] args) {
17 | 
18 |         String topicName = "Hello-Kafka";
19 | 
20 |         Properties props = new Properties();
21 |         props.put("bootstrap.servers", "hadoop001:9092");
22 |         props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
23 |         props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
24 |         /* create the producer */
25 |         Producer<String, String> producer = new KafkaProducer<>(props);
26 | 
27 |         for (int i = 0; i < 10; i++) {
28 |             try {
29 |                 ProducerRecord<String, String> record = new ProducerRecord<>(topicName, "k" + i, "world" + i);
30 |                 /* send the message synchronously */
31 |                 RecordMetadata metadata = producer.send(record).get();
32 |                 System.out.printf("topic=%s, partition=%d, offset=%s \n",
33 |                         metadata.topic(), metadata.partition(), metadata.offset());
34 |             } catch (InterruptedException | ExecutionException e) {
35 |                 e.printStackTrace();
36 |             }
37 |         }
38 | 
39 |         /* close the producer */
40 |         producer.close();
41 |     }
42 | }
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/producers/ProducerWithPartitioner.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.producers;
2 | 
3 | import org.apache.kafka.clients.producer.*;
4 | 
5 | import java.util.Properties;
6 | 
7 | /*
8 |  * Kafka producer example — asynchronous send through a custom partitioner
9 |  */
10 | public class ProducerWithPartitioner {
11 | 
12 |     public static void main(String[] args) {
13 | 
14 |         String topicName = "Kafka-Partitioner-Test";
15 | 
16 |         Properties props = new Properties();
17 |         props.put("bootstrap.servers", "hadoop001:9092");
18 |         props.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
19 |         props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
20 | 
21 |         /* hand over the custom partitioner */
22 |         props.put("partitioner.class", "com.heibaiying.producers.partitioners.CustomPartitioner");
23 |         /* hand over the parameter the partitioner needs */
24 |         props.put("pass.line", 6);
25 | 
26 |         Producer<Integer, String> producer = new KafkaProducer<>(props);
27 | 
28 |         for (int i = 0; i <= 10; i++) {
29 |             String score = "score:" + i;
30 |             ProducerRecord<Integer, String> record = new ProducerRecord<>(topicName, i, score);
31 |             /* send the message asynchronously */
32 |             producer.send(record, (metadata, exception) ->
33 |                     System.out.printf("%s, partition=%d, \n", score, metadata.partition()));
34 |         }
35 | 
36 |         producer.close();
37 |     }
38 | }
--------------------------------------------------------------------------------
/code/Kafka/kafka-basis/src/main/java/com/heibaiying/producers/SimpleProducer.java:
--------------------------------------------------------------------------------
1 | package com.heibaiying.producers;
2 | 
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.Producer;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | 
7 | import java.util.Properties;
8 | 
9 | /*
10 |  * Kafka producer example
11 |  */
12 | 
13 | public class SimpleProducer {
14 | 
15 |     public static void main(String[] args) {
16 | 
17 |         String topicName = "Hello-Kafka";
18 | 
19 |         Properties props = new Properties();
20 |         props.put("bootstrap.servers", "hadoop001:9092");
21 |         props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 23 | /*创建生产者*/ 24 | Producer producer = new KafkaProducer<>(props); 25 | 26 | for (int i = 0; i < 10; i++) { 27 | ProducerRecord record = new ProducerRecord<>(topicName, "hello" + i, "world" + i); 28 | /* 发送消息*/ 29 | producer.send(record); 30 | } 31 | 32 | /*关闭生产者*/ 33 | producer.close(); 34 | } 35 | } -------------------------------------------------------------------------------- /code/Kafka/kafka-basis/src/main/java/com/heibaiying/producers/partitioners/CustomPartitioner.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying.producers.partitioners; 2 | 3 | import org.apache.kafka.clients.producer.Partitioner; 4 | import org.apache.kafka.common.Cluster; 5 | 6 | import java.util.Map; 7 | 8 | /** 9 | * 自定义分区器 10 | */ 11 | public class CustomPartitioner implements Partitioner { 12 | 13 | private int passLine; 14 | 15 | @Override 16 | public void configure(Map configs) { 17 | passLine = (Integer) configs.get("pass.line"); 18 | } 19 | 20 | @Override 21 | public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { 22 | return (Integer) key >= passLine ? 1 : 0; 23 | } 24 | 25 | @Override 26 | public void close() { 27 | System.out.println("分区器关闭"); 28 | } 29 | 30 | 31 | } 32 | -------------------------------------------------------------------------------- /code/Phoenix/spring-boot-mybatis-phoenix/src/main/java/com/heibaiying/springboot/SpringBootMybatisApplication.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying.springboot; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class SpringBootMybatisApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(SpringBootMybatisApplication.class, args); 11 | } 12 | 13 | } 14 | 15 | -------------------------------------------------------------------------------- /code/Phoenix/spring-boot-mybatis-phoenix/src/main/java/com/heibaiying/springboot/bean/USPopulation.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying.springboot.bean; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import lombok.ToString; 7 | 8 | @Data 9 | @AllArgsConstructor 10 | @NoArgsConstructor 11 | @ToString 12 | public class USPopulation { 13 | 14 | private String state; 15 | private String city; 16 | private long population; 17 | 18 | } 19 | -------------------------------------------------------------------------------- /code/Phoenix/spring-boot-mybatis-phoenix/src/main/java/com/heibaiying/springboot/dao/PopulationDao.java: -------------------------------------------------------------------------------- 1 | package com.heibaiying.springboot.dao; 2 | 3 | import com.heibaiying.springboot.bean.USPopulation; 4 | import org.apache.ibatis.annotations.*; 5 | 6 | import java.util.List; 7 | 8 | @Mapper 9 | public interface PopulationDao { 10 | 11 | @Select("SELECT * from us_population") 12 | List queryAll(); 13 | 14 | @Insert("UPSERT INTO us_population VALUES( #{state}, #{city}, #{population} )") 15 | void save(USPopulation USPopulation); 16 | 17 | @Select("SELECT * FROM us_population WHERE state=#{state} AND city = #{city}") 18 | USPopulation 
queryByStateAndCity(@Param("state") String state, @Param("city") String city);


    @Delete("DELETE FROM us_population WHERE state=#{state} AND city = #{city}")
    void deleteByStateAndCity(@Param("state") String state, @Param("city") String city);

}
--------------------------------------------------------------------------------
/code/Phoenix/spring-boot-mybatis-phoenix/src/main/resources/application.yml:
--------------------------------------------------------------------------------
spring:
  datasource:
    # ZooKeeper address
    url: jdbc:phoenix:192.168.0.105:2181
    driver-class-name: org.apache.phoenix.jdbc.PhoenixDriver

    # The connection-pool settings below are optional unless you need special tuning.
    # Spring Boot 2.x uses the high-performance Hikari pool by default; for all available
    # knobs see https://github.com/brettwooldridge/HikariCP#configuration-knobs-baby
    type: com.zaxxer.hikari.HikariDataSource
    hikari:
      # Minimum number of idle connections kept in the pool
      minimum-idle: 10
      # Maximum pool size, counting both idle and in-use connections
      maximum-pool-size: 20
      # Default auto-commit behaviour of connections returned from the pool (defaults to true)
      auto-commit: true
      # Maximum time a connection may sit idle
      idle-timeout: 30000
      # User-defined pool name, shown in logs and the JMX console to identify this pool. Default: auto-generated
      pool-name: custom-hikari
      # Maximum lifetime of a pooled connection; 0 means unlimited, default 1800000 (30 minutes)
      max-lifetime: 1800000
      # Connection timeout, default 30000 (30 seconds)
      connection-timeout: 30000
      # Connection test SQL; dialect-specific, e.g. Oracle needs "select 1 from dual"
      connection-test-query: SELECT 1

# MyBatis settings
mybatis:
  configuration:
    # Whether to log SQL statements; useful while debugging
    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
--------------------------------------------------------------------------------
/code/Phoenix/spring-boot-mybatis-phoenix/src/test/java/com/heibaiying/springboot/PopulationTest.java:
--------------------------------------------------------------------------------
package com.heibaiying.springboot;

import com.heibaiying.springboot.bean.USPopulation;
import com.heibaiying.springboot.dao.PopulationDao;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.List;

@RunWith(SpringRunner.class)
@SpringBootTest
public class PopulationTest {

    @Autowired
    private PopulationDao populationDao;

    @Test
    public void queryAll() {
        List<USPopulation> populationList = populationDao.queryAll();
        if (populationList != null) {
            for (USPopulation population : populationList) {
                System.out.println(population.getCity() + " " + population.getPopulation());
            }
        }
    }

    @Test
    public void save() {
        populationDao.save(new USPopulation("TX", "Dallas", 66666));
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }

    @Test
    public void update() {
        // Phoenix UPSERT overwrites an existing row, so save() doubles as update
        populationDao.save(new USPopulation("TX", "Dallas", 99999));
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }


    @Test
    public void delete() {
        populationDao.deleteByStateAndCity("TX", "Dallas");
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }

}

--------------------------------------------------------------------------------
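Phoenix has no separate INSERT or UPDATE statement: UPSERT either creates the row or overwrites the one with the same primary key, which is why the tests above reuse save() for both cases. For readers who want to see the same semantics without Spring or MyBatis, here is a minimal plain-JDBC sketch. It reuses the ZooKeeper address and the us_population table from the samples above; the class name is hypothetical and not part of the project.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * Minimal plain-JDBC sketch of Phoenix UPSERT semantics (illustrative only).
 * Assumes the phoenix-client jar is on the classpath so the driver registers itself.
 */
public class PhoenixUpsertSketch {

    public static void main(String[] args) throws Exception {
        // ZooKeeper address taken from application.yml above
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:192.168.0.105:2181")) {
            conn.setAutoCommit(true); // Phoenix connections default to manual commit

            try (PreparedStatement ps = conn.prepareStatement(
                    "UPSERT INTO us_population VALUES (?, ?, ?)")) {
                ps.setString(1, "TX");
                ps.setString(2, "Dallas");
                ps.setLong(3, 66666L);
                ps.executeUpdate(); // first call inserts the row ...

                ps.setLong(3, 99999L);
                ps.executeUpdate(); // ... second call overwrites the same row key
            }

            try (PreparedStatement ps = conn.prepareStatement(
                    "SELECT population FROM us_population WHERE state = ? AND city = ?")) {
                ps.setString(1, "TX");
                ps.setString(2, "Dallas");
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        System.out.println("population = " + rs.getLong(1)); // expected: 99999
                    }
                }
            }
        }
    }
}
```
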
/code/Phoenix/spring-mybatis-phoenix/src/main/java/com/heibaiying/bean/USPopulation.java:
--------------------------------------------------------------------------------
package com.heibaiying.bean;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@AllArgsConstructor
@NoArgsConstructor
public class USPopulation {

    private String state;
    private String city;
    private long population;
}
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/main/java/com/heibaiying/dao/PopulationDao.java:
--------------------------------------------------------------------------------
package com.heibaiying.dao;

import com.heibaiying.bean.USPopulation;
import org.apache.ibatis.annotations.Param;

import java.util.List;

public interface PopulationDao {

    List<USPopulation> queryAll();

    void save(USPopulation usPopulation);

    USPopulation queryByStateAndCity(@Param("state") String state, @Param("city") String city);

    void deleteByStateAndCity(@Param("state") String state, @Param("city") String city);
}
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/main/resources/jdbc.properties:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/code/Phoenix/spring-mybatis-phoenix/src/main/resources/jdbc.properties
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/main/resources/mappers/Population.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.heibaiying.dao.PopulationDao">

    <select id="queryAll" resultType="com.heibaiying.bean.USPopulation">
        SELECT * FROM us_population
    </select>

    <insert id="save">
        UPSERT INTO us_population VALUES( #{state}, #{city}, #{population} )
    </insert>

    <select id="queryByStateAndCity" resultType="com.heibaiying.bean.USPopulation">
        SELECT * FROM us_population WHERE state=#{state} AND city = #{city}
    </select>

    <delete id="deleteByStateAndCity">
        DELETE FROM us_population WHERE state=#{state} AND city = #{city}
    </delete>

</mapper>
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/main/resources/mybatisConfig.xml:
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/main/resources/springApplication.xml:
--------------------------------------------------------------------------------
/code/Phoenix/spring-mybatis-phoenix/src/test/java/com/heibaiying/dao/PopulationDaoTest.java:
--------------------------------------------------------------------------------
package com.heibaiying.dao;

import com.heibaiying.bean.USPopulation;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.List;

@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:springApplication.xml"})
public class PopulationDaoTest {

    @Autowired
    private PopulationDao populationDao;

    @Test
    public void queryAll() {
        List<USPopulation> populationList = populationDao.queryAll();
        if (populationList != null) {
            for (USPopulation population : populationList) {
                System.out.println(population.getCity() + " " + population.getPopulation());
            }
        }
    }

    @Test
    public void save() {
        populationDao.save(new USPopulation("TX", "Dallas", 66666));
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }

    @Test
    public void update() {
        // Phoenix UPSERT overwrites an existing row, so save() doubles as update
        populationDao.save(new USPopulation("TX", "Dallas", 99999));
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }


    @Test
    public void delete() {
        populationDao.deleteByStateAndCity("TX", "Dallas");
        USPopulation usPopulation = populationDao.queryByStateAndCity("TX", "Dallas");
        System.out.println(usPopulation);
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-hbase-integration/src/main/java/com/heibaiying/component/CountBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.HashMap;
import java.util.Map;

/**
 * Counts word frequencies
 */
public class CountBolt extends BaseRichBolt {

    private Map<String, Integer> counts = new HashMap<>();

    private OutputCollector collector;


    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getStringByField("word");
        Integer count = counts.get(word);
        if (count == null) {
            count = 0;
        }
        count++;
        counts.put(word, count);
        // Emit the running count downstream
        collector.emit(new Values(word, String.valueOf(count)));

    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-hbase-integration/src/main/java/com/heibaiying/component/DataSourceSpout.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.shade.org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;


/**
 * Data source that produces word-frequency sample lines
 */
public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate incoming data
        String lineData = productData();
        spoutOutputCollector.emit(new Values(lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }


    /**
     * Produces mock data: a tab-separated line containing 1 to 6 of the sample words
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-hbase-integration/src/main/java/com/heibaiying/component/SplitBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;

import java.util.Map;

import static org.apache.storm.utils.Utils.tuple;

/**
 * Splits each line on the tab delimiter
 */
public class SplitBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String line = input.getStringByField("line");
        String[] words = line.split("\t");
        for (String word : words) {
            collector.emit(tuple(word, 1));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-hdfs-integration/src/main/java/com.heibaiying/component/DataSourceSpout.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.shade.org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;


/**
 * Data source that produces word-frequency sample lines
 */
public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate incoming data
        String lineData = productData();
        spoutOutputCollector.emit(new Values(lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }


    /**
     * Produces mock data: a tab-separated line containing 1 to 6 of the sample words
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-kafka-integration/src/main/java/com/heibaiying/kafka/read/LogConsoleBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.kafka.read;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import java.util.Map;

/**
 * Prints the data received from Kafka
 */
public class LogConsoleBolt extends BaseRichBolt {


    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        try {
            String value = input.getStringByField("value");
            System.out.println("received from kafka : " + value);
            // The tuple must be acked, otherwise the Kafka message is consumed again
            collector.ack(input);
        } catch (Exception e) {
            e.printStackTrace();
            collector.fail(input);
        }


    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {

    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-kafka-integration/src/main/java/com/heibaiying/kafka/write/DataSourceSpout.java:
--------------------------------------------------------------------------------
package com.heibaiying.kafka.write;

import org.apache.storm.shade.org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;

/**
 * Data source that produces word-frequency sample lines
 */
public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate incoming data
        String lineData = productData();
        spoutOutputCollector.emit(new Values("key", lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("key", "message"));
    }


    /**
     * Produces mock data: a tab-separated line containing 1 to 6 of the sample words
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-redis-integration/src/main/java/com/heibaiying/component/CountBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.HashMap;
import java.util.Map;

/**
 * Counts word frequencies
 */
public class CountBolt extends BaseRichBolt {

    private Map<String, Integer> counts = new HashMap<>();

    private OutputCollector collector;


    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getStringByField("word");
        Integer count = counts.get(word);
        if (count == null) {
            count = 0;
        }
        count++;
        counts.put(word, count);
        // Emit the running count downstream
        collector.emit(new Values(word, String.valueOf(count)));

    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-redis-integration/src/main/java/com/heibaiying/component/DataSourceSpout.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.shade.org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;


/**
 * Data source that produces word-frequency sample lines
 */
public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate incoming data
        String lineData = productData();
        spoutOutputCollector.emit(new Values(lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }


    /**
     * Produces mock data: a tab-separated line containing 1 to 6 of the sample words
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-redis-integration/src/main/java/com/heibaiying/component/SplitBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.Map;

/**
 * Splits each line on the tab delimiter
 */
public class SplitBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String line = input.getStringByField("line");
        String[] words = line.split("\t");
        for (String word : words) {
            collector.emit(new Values(word, String.valueOf(1)));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-redis-integration/src/main/java/com/heibaiying/component/WordCountStoreMapper.java:
--------------------------------------------------------------------------------
package com.heibaiying.component;

import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
import org.apache.storm.redis.common.mapper.RedisStoreMapper;
import org.apache.storm.tuple.ITuple;

/**
 * Defines how tuples map to data stored in Redis
 */
public class WordCountStoreMapper implements RedisStoreMapper {
    private RedisDataTypeDescription description;
    private final String hashKey = "wordCount";

    public WordCountStoreMapper() {
        description = new RedisDataTypeDescription(
                RedisDataTypeDescription.RedisDataType.HASH, hashKey);
    }

    @Override
    public RedisDataTypeDescription getDataTypeDescription() {
        return description;
    }

    @Override
    public String getKeyFromTuple(ITuple tuple) {
        return tuple.getStringByField("word");
    }

    @Override
    public String getValueFromTuple(ITuple tuple) {
        return tuple.getStringByField("count");
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>storm-word-count</artifactId>
    <version>1.0</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <configuration>
                    <descriptors>
                        <descriptor>src/main/resources/assembly.xml</descriptor>
                    </descriptors>
                    <archive>
                        <manifest>
                            <mainClass>com.heibaiying.wordcount.ClusterWordCountApp</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>1.2.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.8.1</version>
        </dependency>
    </dependencies>

</project>
--------------------------------------------------------------------------------
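The POM above packages the topology with the assembly plugin and sets ClusterWordCountApp as the jar's entry point. The submission code that follows passes `new Config()` with all knobs at their defaults; for reference, here is a hedged sketch of a tuned variant. The class name, the worker count of 2, and the parallelism hint are illustrative assumptions, not settings from this project.

```java
package com.heibaiying.wordcount;

import com.heibaiying.wordcount.component.CountBolt;
import com.heibaiying.wordcount.component.DataSourceSpout;
import com.heibaiying.wordcount.component.SplitBolt;
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.topology.TopologyBuilder;

/**
 * Hypothetical variant of ClusterWordCountApp showing the Config knobs
 * the sample leaves at their defaults.
 */
public class TunedClusterWordCountApp {

    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        // The third argument is a parallelism hint: run two executors for this bolt
        builder.setBolt("SplitBolt", new SplitBolt(), 2).shuffleGrouping("DataSourceSpout");
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

        Config config = new Config();
        config.setNumWorkers(2);   // number of JVM worker processes for this topology
        config.setDebug(false);    // set true to log every emitted tuple
        StormSubmitter.submitTopology("TunedClusterWordCountApp", config, builder.createTopology());
    }
}
```
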
/code/Storm/storm-word-count/src/main/java/com/heibaiying/wordcount/ClusterWordCountApp.java:
--------------------------------------------------------------------------------
package com.heibaiying.wordcount;

import com.heibaiying.wordcount.component.CountBolt;
import com.heibaiying.wordcount.component.DataSourceSpout;
import com.heibaiying.wordcount.component.SplitBolt;
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;

public class ClusterWordCountApp {

    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        // Route the output of DataSourceSpout to SplitBolt for processing
        builder.setBolt("SplitBolt", new SplitBolt()).shuffleGrouping("DataSourceSpout");
        // Route the output of SplitBolt to CountBolt for processing
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

        // Submit the topology to the cluster with StormSubmitter
        try {
            StormSubmitter.submitTopology("ClusterWordCountApp", new Config(), builder.createTopology());
        } catch (AlreadyAliveException | InvalidTopologyException | AuthorizationException e) {
            e.printStackTrace();
        }
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/src/main/java/com/heibaiying/wordcount/LocalWordCountApp.java:
--------------------------------------------------------------------------------
package com.heibaiying.wordcount;

import com.heibaiying.wordcount.component.CountBolt;
import com.heibaiying.wordcount.component.DataSourceSpout;
import com.heibaiying.wordcount.component.SplitBolt;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;

public class LocalWordCountApp {

    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        // Route the output of DataSourceSpout to SplitBolt for processing
        builder.setBolt("SplitBolt", new SplitBolt()).shuffleGrouping("DataSourceSpout");
        // Route the output of SplitBolt to CountBolt for processing
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

        // Create a local cluster for testing; this mode needs no local Storm
        // installation, simply run this main method
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("LocalWordCountApp",
                new Config(), builder.createTopology());
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/src/main/java/com/heibaiying/wordcount/component/CountBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.wordcount.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import java.util.HashMap;
import java.util.Map;

public class CountBolt extends BaseRichBolt {

    private Map<String, Integer> counts = new HashMap<>();

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {

    }

    @Override
    public void execute(Tuple input) {
        String word = input.getStringByField("word");
        Integer count = counts.get(word);
        if (count == null) {
            count = 0;
        }
        count++;
        counts.put(word, count);
        // Print the running totals
        System.out.print("Real-time analysis results : ");
        counts.forEach((key, value) -> System.out.print(key + ":" + value + "; "));
        System.out.println();
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {

    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/src/main/java/com/heibaiying/wordcount/component/DataSourceSpout.java:
--------------------------------------------------------------------------------
package com.heibaiying.wordcount.component;

import org.apache.commons.lang3.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;

public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate incoming data
        String lineData = productData();
        spoutOutputCollector.emit(new Values(lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }


    /**
     * Produces mock data: a tab-separated line containing 1 to 6 of the sample words
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/src/main/java/com/heibaiying/wordcount/component/SplitBolt.java:
--------------------------------------------------------------------------------
package com.heibaiying.wordcount.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.Map;

public class SplitBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String line = input.getStringByField("line");
        String[] words = line.split("\t");
        for (String word : words) {
            collector.emit(new Values(word));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}
--------------------------------------------------------------------------------
/code/Storm/storm-word-count/src/main/resources/assembly.xml:
--------------------------------------------------------------------------------
<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">

    <id>jar-with-dependencies</id>

    <formats>
        <format>jar</format>
    </formats>

    <includeBaseDirectory>false</includeBaseDirectory>

    <dependencySets>
        <dependencySet>
            <outputDirectory>/</outputDirectory>
            <useProjectArtifact>true</useProjectArtifact>
            <unpack>true</unpack>
            <scope>runtime</scope>
            <excludes>
                <exclude>org.apache.storm:storm-core</exclude>
            </excludes>
        </dependencySet>
    </dependencySets>

</assembly>
--------------------------------------------------------------------------------
/code/Zookeeper/curator/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>curator</artifactId>
    <version>1.0</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.curator</groupId>
            <artifactId>curator-framework</artifactId>
            <version>4.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.curator</groupId>
            <artifactId>curator-recipes</artifactId>
            <version>4.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.zookeeper</groupId>
            <artifactId>zookeeper</artifactId>
            <version>3.4.13</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
    </dependencies>

</project>
--------------------------------------------------------------------------------
/code/spark/spark-streaming-basis/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>spark-streaming-basis</artifactId>
    <version>1.0</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.12</artifactId>
            <version>2.4.3</version>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.9.0</version>
        </dependency>
    </dependencies>

</project>
--------------------------------------------------------------------------------
/code/spark/spark-streaming-basis/src/main/java/com/heibaiying/NetworkWordCount.scala:
--------------------------------------------------------------------------------
package com.heibaiying

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Word count
 */
object NetworkWordCount {


  def main(args: Array[String]) {

    /* Use a 5-second batch interval */
    val sparkConf = new SparkConf().setAppName("NetworkWordCount").setMaster("local[2]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    /* Create a socket text stream and count the words in it */
    val lines = ssc.socketTextStream("hadoop001", 9999)
    lines.flatMap(_.split(" ")).map(x => (x, 1)).reduceByKey(_ + _).print()

    /* Start the job */
    ssc.start()
    /* Wait for it to terminate */
    ssc.awaitTermination()

  }
}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-basis/src/main/java/com/heibaiying/NetworkWordCountToRedis.scala:
--------------------------------------------------------------------------------
package com.heibaiying

import com.heibaiying.utils.JedisPoolUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

/**
 * Word count, with the results written to Redis
 */
object NetworkWordCountToRedis {


  def main(args: Array[String]) {

    /* Use a 5-second batch interval */
    val sparkConf = new SparkConf().setAppName("NetworkWordCountToRedis").setMaster("local[2]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    /* Create a socket text stream and count the words in it */
    val lines = ssc.socketTextStream("hadoop001", 9999)
    val pairs: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map(x => (x, 1)).reduceByKey(_ + _)

    pairs.foreachRDD { rdd =>
      rdd.foreachPartition { partitionOfRecords =>
        var jedis: Jedis = null
        try {
          jedis = JedisPoolUtil.getConnection
          partitionOfRecords.foreach(record => jedis.hincrBy("wordCount", record._1, record._2))
        } catch {
          case ex: Exception =>
            ex.printStackTrace()
        } finally {
          if (jedis != null) jedis.close()
        }
      }
    }

    /* Start the job */
    ssc.start()
    /* Wait for it to terminate */
    ssc.awaitTermination()
  }
}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-basis/src/main/java/com/heibaiying/NetworkWordCountV2.scala:
--------------------------------------------------------------------------------
package com.heibaiying

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Word count, stateful version
 */
object NetworkWordCountV2 {


  def main(args: Array[String]) {

    /*
     * When testing locally it is best to set the Hadoop user name explicitly;
     * otherwise the local machine's user name is used, which may cause a
     * permission exception when creating directories on HDFS
     */
    System.setProperty("HADOOP_USER_NAME", "root")

    /* Use a 5-second batch interval */
    val sparkConf = new SparkConf().setAppName("NetworkWordCountV2").setMaster("local[2]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    /* A checkpoint directory is mandatory for updateStateByKey */
    ssc.checkpoint("hdfs://hadoop001:8020/spark-streaming")

    /* Create a socket text stream and count the words in it */
    val lines = ssc.socketTextStream("hadoop001", 9999)
    lines.flatMap(_.split(" ")).map(x => (x, 1))
      .updateStateByKey[Int](updateFunction _)
      .print()

    /* Start the job */
    ssc.start()
    /* Wait for it to terminate */
    ssc.awaitTermination()

  }

  /**
   * Running sum
   *
   * @param currentValues values in the current batch
   * @param preValues     previously accumulated value
   * @return the new total
   */
  def updateFunction(currentValues: Seq[Int], preValues: Option[Int]): Option[Int] = {
    val current = currentValues.sum
    val pre = preValues.getOrElse(0)
    Some(current + pre)
  }

}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-basis/src/main/java/com/heibaiying/utils/JedisPoolUtil.java:
--------------------------------------------------------------------------------
package com.heibaiying.utils;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

public class JedisPoolUtil {

    /* volatile prevents instruction reordering during initialization */
    private static volatile JedisPool jedisPool = null;

    private static final String HOST = "localhost";
    private static final int PORT = 6379;


    /* Lazily initialized singleton using double-checked locking */
    public static Jedis getConnection() {
        if (jedisPool == null) {
            synchronized (JedisPoolUtil.class) {
                if (jedisPool == null) {
                    JedisPoolConfig config = new JedisPoolConfig();
                    config.setMaxTotal(30);
                    config.setMaxIdle(10);
                    jedisPool = new JedisPool(config, HOST, PORT);
                }
            }
        }
        return jedisPool.getResource();
    }
}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-flume/src/main/scala/com/heibaiying/flume/PullBasedWordCount.scala:
--------------------------------------------------------------------------------
package com.heibaiying.flume

import
org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.flume.FlumeUtils

/**
 * @author : heibaiying
 * Pull-based approach: fetches data using a custom receiver
 */
object PullBasedWordCount {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 1. Create the input stream
    val flumeStream = FlumeUtils.createPollingStream(ssc, "hadoop001", 8888)

    // 2. Print the data in the stream
    flumeStream.map(line => new String(line.event.getBody.array()).trim).print()

    ssc.start()
    ssc.awaitTermination()
  }

}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-flume/src/main/scala/com/heibaiying/flume/PushBasedWordCount.scala:
--------------------------------------------------------------------------------
package com.heibaiying.flume

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.flume.FlumeUtils


/**
 * @author : heibaiying
 * Push-based approach: Flume pushes the data to the receiver
 */
object PushBasedWordCount {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 1. Create the input stream
    val flumeStream = FlumeUtils.createStream(ssc, "hadoop001", 8888)

    // 2. Print the data in the stream
    flumeStream.map(line => new String(line.event.getBody.array()).trim).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
--------------------------------------------------------------------------------
/code/spark/spark-streaming-kafka/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>spark-streaming-kafka</artifactId>
    <version>1.0</version>

    <properties>
        <scala.version>2.12</scala.version>
        <spark.version>2.4.0</spark.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_${scala.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-10_${scala.version}</artifactId>
            <version>2.4.3</version>
        </dependency>
        <dependency>
            <groupId>com.thoughtworks.paranamer</groupId>
            <artifactId>paranamer</artifactId>
            <version>2.8</version>
        </dependency>
    </dependencies>

</project>
--------------------------------------------------------------------------------
/notes/installation/Linux下Flume的安装.md:
--------------------------------------------------------------------------------
# Installing Flume on Linux


## 1. Prerequisites

Flume requires JDK 1.8+. For JDK installation see this repository:

> [Installing the JDK on Linux](https://github.com/heibaiying/BigData-Notes/blob/master/notes/installation/Linux下JDK安装.md)



## 2. Installation Steps

### 2.1 Download and extract

Download the Flume version you need; here I use the `CDH` build of Flume, available at: http://archive.cloudera.com/cdh5/cdh/5/

```shell
# Extract after downloading
tar -zxvf flume-ng-1.6.0-cdh5.15.2.tar.gz
```

### 2.2 Configure environment variables

```shell
# vim /etc/profile
```

Add the environment variables:

```shell
export FLUME_HOME=/usr/app/apache-flume-1.6.0-cdh5.15.2-bin
export PATH=$FLUME_HOME/bin:$PATH
```

Make the configuration take effect immediately:

```shell
# source /etc/profile
```

### 2.3 Modify the configuration

Go to the `conf/` directory under the installation directory and copy Flume's environment template `flume-env.sh.template`:

```shell
# cp flume-env.sh.template flume-env.sh
```

Edit `flume-env.sh` and point it at the JDK installation path:

```shell
# Enviroment variables can be set here.
export JAVA_HOME=/usr/java/jdk1.8.0_201
```

### 2.4 Verify

Since Flume's bin directory is already on the PATH, verify the configuration directly with:

```shell
# flume-ng version
```

If the corresponding version information is printed, the configuration succeeded.

![flume-version](https://gitee.com/heibaiying/BigData-Notes/raw/master/pictures/flume-version.png)
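
Once `flume-ng version` works, you may also want to confirm that a running agent actually accepts events. The sketch below uses Flume's Java RPC client; the host `hadoop001`, the port `44444`, and an agent configured with an `avro` source listening there are all assumptions for illustration, not part of the note above.

```java
import org.apache.flume.Event;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;

import java.nio.charset.StandardCharsets;

/**
 * Minimal sanity check for a running Flume agent (hypothetical example).
 * Assumes an agent with an avro source listening on hadoop001:44444.
 */
public class FlumeRpcCheck {

    public static void main(String[] args) throws Exception {
        RpcClient client = RpcClientFactory.getDefaultInstance("hadoop001", 44444);
        try {
            // Build a simple event and send it; an EventDeliveryException means
            // the agent is unreachable or its channel is full
            Event event = EventBuilder.withBody("hello flume", StandardCharsets.UTF_8);
            client.append(event);
            System.out.println("event delivered");
        } finally {
            client.close();
        }
    }
}
```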
--------------------------------------------------------------------------------
/notes/installation/Linux下JDK安装.md:
--------------------------------------------------------------------------------
# Installing the JDK on Linux

> **System environment**: CentOS 7.6
>
> **JDK version**: jdk 1.8.0_201



### 1. Download and extract

Download the JDK you need from the [official website](https://www.oracle.com/technetwork/java/javase/downloads/index.html); here I use [JDK 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html). Extract it after downloading:

```shell
[root@ java]# tar -zxvf jdk-8u201-linux-x64.tar.gz
```



### 2. Set environment variables

```shell
[root@ java]# vi /etc/profile
```

Add the following configuration:

```shell
export JAVA_HOME=/usr/java/jdk1.8.0_201
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
```

Run `source` so the configuration takes effect immediately:

```shell
[root@ java]# source /etc/profile
```



### 3. Check the installation

```shell
[root@ java]# java -version
```

If the corresponding version information is printed, the installation succeeded:

```shell
java version "1.8.0_201"
Java(TM) SE Runtime Environment (build 1.8.0_201-b09)
Java HotSpot(TM) 64-Bit Server VM (build 25.201-b09, mixed mode)
```
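
To confirm the compiler as well as the runtime, you can compile and run a one-file program; this snippet is an illustrative addition, not part of the original note.

```java
// JdkCheck.java -- compile with `javac JdkCheck.java`, run with `java JdkCheck`
public class JdkCheck {
    public static void main(String[] args) {
        // Prints the version and home directory the JVM actually resolved to
        System.out.println("java.version = " + System.getProperty("java.version"));
        System.out.println("java.home    = " + System.getProperty("java.home"));
    }
}
```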
--------------------------------------------------------------------------------
/notes/installation/Linux下Python安装.md:
--------------------------------------------------------------------------------
## Installing Python on Linux

> **System environment**: CentOS 7.6
>
> **Python version**: Python-3.6.8

### 1. Build dependencies

Building Python 3.x depends on four packages: gcc, zlib, zlib-devel, and openssl-devel, so install them first:

```shell
yum install gcc -y
yum install zlib -y
yum install zlib-devel -y
yum install openssl-devel -y
```

### 2. Download the source

Python source tarballs are available at: https://www.python.org/downloads/

```shell
# wget https://www.python.org/ftp/python/3.6.8/Python-3.6.8.tgz
```

### 3. Extract and build

```shell
# tar -zxvf Python-3.6.8.tgz
```

Build from the source root; you can choose the installation prefix, here `/usr/app/python3.6`:

```shell
# cd Python-3.6.8
# ./configure --prefix=/usr/app/python3.6
# make && make install
```

### 4. Configure environment variables

```shell
vim /etc/profile
```

```shell
export PYTHON_HOME=/usr/app/python3.6
export PATH=${PYTHON_HOME}/bin:$PATH
```

Make the configuration take effect immediately:

```shell
source /etc/profile
```

### 5. Verify the installation

Run `python3`; if the Python REPL starts, the installation succeeded:

```shell
[root@hadoop001 app]# python3
Python 3.6.8 (default, Mar 29 2019, 10:17:41)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-36)] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> 1+1
2
>>> exit()
[root@hadoop001 app]#
```
--------------------------------------------------------------------------------
/notes/大数据技术栈思维导图.md:
--------------------------------------------------------------------------------
(Figure: mind map of the big data technology stack)
-------------------------------------------------------------------------------- /pictures/01_data_at_rest_infrastructure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/01_data_at_rest_infrastructure.png -------------------------------------------------------------------------------- /pictures/02_stream_processing_infrastructure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/02_stream_processing_infrastructure.png -------------------------------------------------------------------------------- /pictures/CustomRedisCountApp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/CustomRedisCountApp.png -------------------------------------------------------------------------------- /pictures/Detailed-Hadoop-MapReduce-Data-Flow-14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Detailed-Hadoop-MapReduce-Data-Flow-14.png -------------------------------------------------------------------------------- /pictures/Figure3Architecture-of-YARN.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Figure3Architecture-of-YARN.png -------------------------------------------------------------------------------- /pictures/HADOOP-ECOSYSTEM-Edureka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HADOOP-ECOSYSTEM-Edureka.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig1.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig2.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig3.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig4.png 
-------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig5.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig6.png -------------------------------------------------------------------------------- /pictures/HBaseArchitecture-Blog-Fig7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBaseArchitecture-Blog-Fig7.png -------------------------------------------------------------------------------- /pictures/HBase_table-iteblog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HBase_table-iteblog.png -------------------------------------------------------------------------------- /pictures/HDFS-HA-Architecture-Edureka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HDFS-HA-Architecture-Edureka.png -------------------------------------------------------------------------------- /pictures/HashMap-HashTable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/HashMap-HashTable.png -------------------------------------------------------------------------------- /pictures/Internal-Working-of-Apache-Storm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Internal-Working-of-Apache-Storm.png -------------------------------------------------------------------------------- /pictures/Phoenix-create-table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Phoenix-create-table.png -------------------------------------------------------------------------------- /pictures/Phoenix-delete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Phoenix-delete.png -------------------------------------------------------------------------------- /pictures/Phoenix-hadoop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Phoenix-hadoop.png -------------------------------------------------------------------------------- /pictures/Phoenix-java-api-result.png: 
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/Phoenix-java-api-result.png
--------------------------------------------------------------------------------
/pictures/ (binary image assets; each entry below follows the same pattern as the
one above, i.e. its content is the raw URL
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/<filename>):
--------------------------------------------------------------------------------
Phoenix-select.png, Phoenix-update.png, RegionObserver.png, RegionObservers-works.png,
Stream groupings.png, WordCountToHBaseApp.png, akaban-jps.png, azkaban-click-edit.png,
azkaban-create-project.png, azkaban-dependencies.png, azkaban-edit.png,
azkaban-embeded-flow.png, azkaban-embeded-success.png, azkaban-execute.png,
azkaban-flows.png, azkaban-gradle-wrapper-2.png, azkaban-gradle-wrapper.png,
azkaban-hdfs.png, azkaban-hive-result.png, azkaban-hive.png, azkaban-log.png,
azkaban-memory.png, azkaban-mr.png, azkaban-project-edit.png, azkaban-setting.png,
azkaban-simle-result.png, azkaban-simple.png, azkaban-successed.png,
azkaban-task-abcde-zip.png, azkaban-task-abcde.png, azkaban-upload.png,
azkaban-web-ui.png, azkaban-web.png, azkaban-zip.png, azkaban.png,
bigdata-notes-icon.png, bigdata-notes-icon.psd, blog-logo.png,
curator-retry-policy.png, datasourcetohdfs.png, deprecated.png,
flink-Rescaling.png, flink-RichParallelSourceFunction.png, flink-api-stack.png,
flink-application-submission.png, flink-basis-project.png,
flink-bounded-unbounded.png, flink-checkpoints-backend.png, flink-dashboard.png,
flink-download.png, flink-kafka-datasource-console.png,
flink-kafka-datasource-producer.png, flink-kafka-producer-consumer.png,
flink-keyed-state.png, flink-lib.png, flink-maven-new.png, flink-maven-profile.png,
flink-maven.png, flink-mysql-sink.png, flink-non-windowed.png,
flink-on-yarn-session.jpg, flink-operator-state-para1.png,
flink-operator-state-para2.png, flink-operator-state.png,
flink-optional-components.png, flink-process.png, flink-richsink.png,
flink-scala-shell.png, flink-session-windows.png, flink-sliding-windows.png,
flink-socket-wordcount-stdout.png, flink-socket-wordcount.png, flink-stack.png,
flink-standalone-cluster-ha.png, flink-standalone-cluster-jps.png,
flink-standalone-cluster.jpg, flink-start-cluster-shell.png,
flink-state-management.png, flink-stateful-stream.png, flink-stream-barriers.png,
flink-subtask-slots.png, flink-task-parallelism.png, flink-task-subtask.png,
flink-tasks-slots.png, flink-tumbling-windows.png, flink-window-word-count.png,
flink-word-count.png, flink-yarn-session.png, flink.png, flume-architecture.png,
flume-consolidation.png, flume-example-1.png, flume-example-2.png,
flume-example-3.png, flume-example-4.png, flume-example-7.png, flume-example-8.png,
flume-example-9.png, flume-kafka-01.png, flume-kafka-2.png, flume-kafka.png,
flume-multi-agent-flow.png, flume-multiplexing-the-flow.png, flume-retry.png,
flume-version.png, flume.png, full-stack-notes.png, future-of-spark.png,
hadoop-QJM-同步机制.png, hadoop-code-mapping.png, hadoop-code-partitation.png,
hadoop-code-reducer.png, hadoop-combiner.png, hadoop-ha高可用集群架构.png,
hadoop-namenode主备切换.png, hadoop-no-combiner.png, hadoop-rm-ha-overview.png,
hadoop-wordcountapp.png, hadoop-wordcountcombinerpartition.png,
hadoop-yarn安装验证.png, hadoop-集群搭建2.png, hadoop-集群搭建3.png,
hadoop-集群环境搭建.png, hadoop.jpg, hadoop安装验证.png, hadoop集群规划.png,
hadoop高可用集群1.png, hadoop高可用集群2.png, hadoop高可用集群3.png,
hadoop高可用集群4.png, hadoop高可用集群5.png, hadoop高可用集群规划.png,
hbase-60010.png, hbase-Region-Server.png, hbase-arc.png,
hbase-bytearraycomparable.png, hbase-co-unload.png, hbase-compareFilter.png,
hbase-connection.png, hbase-coprocessor.png, hbase-copy-table.png,
hbase-cp-hdfs.png, hbase-cp-helloworld.png, hbase-cp-lisi.png, hbase-cp-load.png,
hbase-filterbase-subclass.png, hbase-fliter.png, hbase-hadoop.png,
hbase-region-dis.png, hbase-region-splite.png, hbase-unload-test.png,
hbase-web-ui-phoenix.png, hbase-web-ui.png, hbase-webtable.png,
hbase-集群搭建1.png, hbase-集群搭建2.png, hbase.jpg, hbase.png, hbase集群规划.png,
hdfs-read-1.jpg, hdfs-tolerance-1.jpg, hdfs-tolerance-2.jpg, hdfs-tolerance-3.jpg,
hdfs-tolerance-4.jpg, hdfs-tolerance-5.jpg, hdfs-write-1.jpg, hdfs-write-2.jpg,
hdfs-write-3.jpg, hdfs-机架.png, hdfsarchitecture.png, hdfsdatanodes.png,
hive-1-2-view.png, hive-beeline-cli.png, hive-beeline.png, hive-data-type.png,
hive-e.png, hive-emp-deptno-20-30.png, hive-emp-deptno-20.png, hive-emp-ptn.png,
hive-emp-ts-2.png, hive-emp-ts.png, hive-emp.png, hive-external-table.png,
hive-hadoop-bucket.png, hive-hadoop-mapreducer.png, hive-hadoop-partitation.png,
hive-index-show.png, hive-index-table.png, hive-install-2.png,
hive-mysql-tables.png, hive-mysql.png, hive-n-j.png, hive-order-by.png,
hive-ouput.png, hive-right-join.png, hive-select-emp.png, hive-show-database.png,
hive-view-properties.png, hive.jpg, hive体系架构.png, idea-newproject-scala.png,
idea-scala-2.1.8.png, idea-scala-change.png, idea-scala-plugin.png,
idea-scala-select.png, ifconfig.png, ipconfig.png, jar-with-dependencies.png,
kafka-BIO.png, kafka-cluster-shell.png, kafka-cluster.png,
kafka-compress-message.png, kafka-consumer01.png, kafka-consumer02.png,
kafka-producer-consumer.png, kafka-send-messgaes.png, kafka-simple-producer.png,
kafka-topic.png, kafka-元数据请求.png, kafka-分区副本.png, kafka-数据可见性.png,
kafka-零拷贝.png, kafka.png, kafka消费者.png, mapreduce-combiner.png,
mapreduce-sort.png
--------------------------------------------------------------------------------
/pictures/mapreduce-with-combiners.png:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/mapreduce-with-combiners.png -------------------------------------------------------------------------------- /pictures/mapreduce-without-combiners.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/mapreduce-without-combiners.png -------------------------------------------------------------------------------- /pictures/mapreduceProcess.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/mapreduceProcess.png -------------------------------------------------------------------------------- /pictures/mutli-net-ip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/mutli-net-ip.png -------------------------------------------------------------------------------- /pictures/oozie.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/oozie.jpg -------------------------------------------------------------------------------- /pictures/phoenix-core-jar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/phoenix-core-jar.png -------------------------------------------------------------------------------- /pictures/phoenix-shell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/phoenix-shell.png -------------------------------------------------------------------------------- /pictures/readfromkafka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/readfromkafka.png -------------------------------------------------------------------------------- /pictures/relationships-worker-processes-executors-tasks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/relationships-worker-processes-executors-tasks.png -------------------------------------------------------------------------------- /pictures/scala-collection-imm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-collection-imm.png -------------------------------------------------------------------------------- /pictures/scala-collection-m.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-collection-m.png 
-------------------------------------------------------------------------------- /pictures/scala-collection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-collection.png -------------------------------------------------------------------------------- /pictures/scala-hello-world.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-hello-world.png -------------------------------------------------------------------------------- /pictures/scala-int+.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-int+.png -------------------------------------------------------------------------------- /pictures/scala-ordered-ordering.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-ordered-ordering.png -------------------------------------------------------------------------------- /pictures/scala-other-resources.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-other-resources.png -------------------------------------------------------------------------------- /pictures/scala-plugin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-plugin.png -------------------------------------------------------------------------------- /pictures/scala-richInt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-richInt.png -------------------------------------------------------------------------------- /pictures/scala-sdk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-sdk.png -------------------------------------------------------------------------------- /pictures/scala-select.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-select.png -------------------------------------------------------------------------------- /pictures/scala-shell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-shell.png -------------------------------------------------------------------------------- /pictures/scala-分区数.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-分区数.png 
-------------------------------------------------------------------------------- /pictures/scala-操作符优先级.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-操作符优先级.png -------------------------------------------------------------------------------- /pictures/scala-视图界定.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala-视图界定.png -------------------------------------------------------------------------------- /pictures/scala.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala.jpg -------------------------------------------------------------------------------- /pictures/scala带有特质的对象.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala带有特质的对象.png -------------------------------------------------------------------------------- /pictures/scala继承层次.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/scala继承层次.png -------------------------------------------------------------------------------- /pictures/spark-Big-table–to–big-table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-Big-table–to–big-table.png -------------------------------------------------------------------------------- /pictures/spark-Big-table–to–small-table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-Big-table–to–small-table.png -------------------------------------------------------------------------------- /pictures/spark-DAG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-DAG.png -------------------------------------------------------------------------------- /pictures/spark-Logical-Planning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-Logical-Planning.png -------------------------------------------------------------------------------- /pictures/spark-Physical-Planning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-Physical-Planning.png -------------------------------------------------------------------------------- /pictures/spark-Standalone-web-ui.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-Standalone-web-ui.png -------------------------------------------------------------------------------- /pictures/spark-aggregateByKey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-aggregateByKey.png -------------------------------------------------------------------------------- /pictures/spark-dataFrame+RDDs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-dataFrame+RDDs.png -------------------------------------------------------------------------------- /pictures/spark-download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-download.png -------------------------------------------------------------------------------- /pictures/spark-flume-console.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-flume-console.png -------------------------------------------------------------------------------- /pictures/spark-flume-input.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-flume-input.png -------------------------------------------------------------------------------- /pictures/spark-getpartnum.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-getpartnum.png -------------------------------------------------------------------------------- /pictures/spark-mysql-分区上下限.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-mysql-分区上下限.png -------------------------------------------------------------------------------- /pictures/spark-pi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-pi.png -------------------------------------------------------------------------------- /pictures/spark-reducebykey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-reducebykey.png -------------------------------------------------------------------------------- /pictures/spark-scheme.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-scheme.png -------------------------------------------------------------------------------- /pictures/spark-shell-local.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-shell-local.png -------------------------------------------------------------------------------- /pictures/spark-shell-web-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-shell-web-ui.png -------------------------------------------------------------------------------- /pictures/spark-shell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-shell.png -------------------------------------------------------------------------------- /pictures/spark-sql-NATURAL-JOIN.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-sql-NATURAL-JOIN.png -------------------------------------------------------------------------------- /pictures/spark-sql-shell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-sql-shell.png -------------------------------------------------------------------------------- /pictures/spark-sql-自定义函数.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-sql-自定义函数.png -------------------------------------------------------------------------------- /pictures/spark-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-stack.png -------------------------------------------------------------------------------- /pictures/spark-straming-kafka-console.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-straming-kafka-console.png -------------------------------------------------------------------------------- /pictures/spark-streaming-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-arch.png -------------------------------------------------------------------------------- /pictures/spark-streaming-dstream-ops.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-dstream-ops.png -------------------------------------------------------------------------------- /pictures/spark-streaming-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-flow.png 
-------------------------------------------------------------------------------- /pictures/spark-streaming-flume-jar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-flume-jar.png -------------------------------------------------------------------------------- /pictures/spark-streaming-word-count-v1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-word-count-v1.png -------------------------------------------------------------------------------- /pictures/spark-streaming-word-count-v2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-word-count-v2.png -------------------------------------------------------------------------------- /pictures/spark-streaming-word-count-v3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-streaming-word-count-v3.png -------------------------------------------------------------------------------- /pictures/spark-structure-api.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-structure-api.png -------------------------------------------------------------------------------- /pictures/spark-unifed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-unifed.png -------------------------------------------------------------------------------- /pictures/spark-web-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-web-ui.png -------------------------------------------------------------------------------- /pictures/spark-内存不足.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-内存不足.png -------------------------------------------------------------------------------- /pictures/spark-内存不足2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-内存不足2.png -------------------------------------------------------------------------------- /pictures/spark-分区.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-分区.png -------------------------------------------------------------------------------- /pictures/spark-窄依赖和宽依赖.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-窄依赖和宽依赖.png -------------------------------------------------------------------------------- /pictures/spark-累加器1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-累加器1.png -------------------------------------------------------------------------------- /pictures/spark-累加器2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-累加器2.png -------------------------------------------------------------------------------- /pictures/spark-累加器方法.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-累加器方法.png -------------------------------------------------------------------------------- /pictures/spark-运行安全.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-运行安全.png -------------------------------------------------------------------------------- /pictures/spark-运行时类型安全.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-运行时类型安全.png -------------------------------------------------------------------------------- /pictures/spark-集群搭建1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群搭建1.png -------------------------------------------------------------------------------- /pictures/spark-集群搭建2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群搭建2.png -------------------------------------------------------------------------------- /pictures/spark-集群搭建3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群搭建3.png -------------------------------------------------------------------------------- /pictures/spark-集群搭建4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群搭建4.png -------------------------------------------------------------------------------- /pictures/spark-集群搭建5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群搭建5.png -------------------------------------------------------------------------------- /pictures/spark-集群模式.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark-集群模式.png -------------------------------------------------------------------------------- /pictures/spark.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark.jpg -------------------------------------------------------------------------------- /pictures/spark集群规划.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spark集群规划.png -------------------------------------------------------------------------------- /pictures/spout-bolt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spout-bolt.png -------------------------------------------------------------------------------- /pictures/spring-boot-mybatis-phoenix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spring-boot-mybatis-phoenix.png -------------------------------------------------------------------------------- /pictures/spring-mybatis-phoenix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/spring-mybatis-phoenix.png -------------------------------------------------------------------------------- /pictures/sql-hive-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sql-hive-arch.png -------------------------------------------------------------------------------- /pictures/sql-join.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sql-join.jpg -------------------------------------------------------------------------------- /pictures/sqoop-help.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-help.png -------------------------------------------------------------------------------- /pictures/sqoop-hive-hdfs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-hive-hdfs.png -------------------------------------------------------------------------------- /pictures/sqoop-hive-location.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-hive-location.png -------------------------------------------------------------------------------- /pictures/sqoop-list-databases.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-list-databases.png -------------------------------------------------------------------------------- /pictures/sqoop-map-task.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-map-task.png -------------------------------------------------------------------------------- /pictures/sqoop-mysql-connect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-mysql-connect.png -------------------------------------------------------------------------------- /pictures/sqoop-mysql-jar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-mysql-jar.png -------------------------------------------------------------------------------- /pictures/sqoop-tool.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-tool.png -------------------------------------------------------------------------------- /pictures/sqoop-version-selected.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-version-selected.png -------------------------------------------------------------------------------- /pictures/sqoop-version.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop-version.png -------------------------------------------------------------------------------- /pictures/sqoop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop.png -------------------------------------------------------------------------------- /pictures/sqoop_hbase.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hbase.png -------------------------------------------------------------------------------- /pictures/sqoop_hdfs_ls.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hdfs_ls.png -------------------------------------------------------------------------------- /pictures/sqoop_hive_error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hive_error.png -------------------------------------------------------------------------------- /pictures/sqoop_hive_success.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hive_success.png -------------------------------------------------------------------------------- /pictures/sqoop_hive_table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hive_table.png -------------------------------------------------------------------------------- /pictures/sqoop_hive_tables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/sqoop_hive_tables.png -------------------------------------------------------------------------------- /pictures/store-redis-manager.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/store-redis-manager.png -------------------------------------------------------------------------------- /pictures/storm-Redis-Mapper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-Redis-Mapper.png -------------------------------------------------------------------------------- /pictures/storm-abstractRedisBolt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-abstractRedisBolt.png -------------------------------------------------------------------------------- /pictures/storm-baseRichSpout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-baseRichSpout.png -------------------------------------------------------------------------------- /pictures/storm-baseRichbolt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-baseRichbolt.png -------------------------------------------------------------------------------- /pictures/storm-bolts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-bolts.png -------------------------------------------------------------------------------- /pictures/storm-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-flow.png -------------------------------------------------------------------------------- /pictures/storm-hbase-result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-hbase-result.png -------------------------------------------------------------------------------- /pictures/storm-hdfs-result.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-hdfs-result.png -------------------------------------------------------------------------------- /pictures/storm-jar-complie-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-jar-complie-error.png -------------------------------------------------------------------------------- /pictures/storm-jar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-jar.png -------------------------------------------------------------------------------- /pictures/storm-jar2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-jar2.png -------------------------------------------------------------------------------- /pictures/storm-jedicCommands.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-jedicCommands.png -------------------------------------------------------------------------------- /pictures/storm-kafka-producer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-kafka-producer.png -------------------------------------------------------------------------------- /pictures/storm-kafka-receiver.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-kafka-receiver.png -------------------------------------------------------------------------------- /pictures/storm-lib.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-lib.png -------------------------------------------------------------------------------- /pictures/storm-list-kill.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-list-kill.png -------------------------------------------------------------------------------- /pictures/storm-package-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-package-error.png -------------------------------------------------------------------------------- /pictures/storm-spouts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-spouts.png -------------------------------------------------------------------------------- /pictures/storm-streams.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-streams.png -------------------------------------------------------------------------------- /pictures/storm-submit-success.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-submit-success.png -------------------------------------------------------------------------------- /pictures/storm-topology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-topology.png -------------------------------------------------------------------------------- /pictures/storm-tuples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-tuples.png -------------------------------------------------------------------------------- /pictures/storm-ui-actions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-ui-actions.png -------------------------------------------------------------------------------- /pictures/storm-web-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-web-ui.png -------------------------------------------------------------------------------- /pictures/storm-word-count-console.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-word-count-console.png -------------------------------------------------------------------------------- /pictures/storm-word-count-p.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-word-count-p.png -------------------------------------------------------------------------------- /pictures/storm-wordcounttoredis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-wordcounttoredis.png -------------------------------------------------------------------------------- /pictures/storm-集群-shell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-集群-shell.png -------------------------------------------------------------------------------- /pictures/storm-集群搭建1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-集群搭建1.png -------------------------------------------------------------------------------- 
/pictures/storm-集群规划.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm-集群规划.png -------------------------------------------------------------------------------- /pictures/storm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm.png -------------------------------------------------------------------------------- /pictures/storm集群.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm集群.png -------------------------------------------------------------------------------- /pictures/storm集群搭建2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/storm集群搭建2.png -------------------------------------------------------------------------------- /pictures/streaming-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/streaming-flow.png -------------------------------------------------------------------------------- /pictures/strom-kafka-consumer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/strom-kafka-consumer.png -------------------------------------------------------------------------------- /pictures/topology-tasks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/topology-tasks.png -------------------------------------------------------------------------------- /pictures/virtualbox-multi-network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/virtualbox-multi-network.png -------------------------------------------------------------------------------- /pictures/virtualbox启用网络.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/virtualbox启用网络.png -------------------------------------------------------------------------------- /pictures/weixin-desc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/weixin-desc.png -------------------------------------------------------------------------------- /pictures/weixin-normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/weixin-normal.png -------------------------------------------------------------------------------- /pictures/weixin.jpg: 
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/weixin.jpg
--------------------------------------------------------------------------------
/pictures/writetokafka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/writetokafka.png
--------------------------------------------------------------------------------
/pictures/yarn-base.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/yarn-base.png
--------------------------------------------------------------------------------
/pictures/yarn工作原理.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/yarn工作原理.png
--------------------------------------------------------------------------------
/pictures/yarn工作原理简图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/yarn工作原理简图.png
--------------------------------------------------------------------------------
/pictures/zookeeper-brocast.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-brocast.jpg
--------------------------------------------------------------------------------
/pictures/zookeeper-cluster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-cluster.png
--------------------------------------------------------------------------------
/pictures/zookeeper-hadoop001.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-hadoop001.png
--------------------------------------------------------------------------------
/pictures/zookeeper-hadoop002.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-hadoop002.png
--------------------------------------------------------------------------------
/pictures/zookeeper-hadoop003.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-hadoop003.png
--------------------------------------------------------------------------------
/pictures/zookeeper-super.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-super.png
--------------------------------------------------------------------------------
/pictures/zookeeper-zkcomponents.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-zkcomponents.jpg
--------------------------------------------------------------------------------
/pictures/zookeeper-zknamespace.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-zknamespace.jpg
--------------------------------------------------------------------------------
/pictures/zookeeper-zkservice.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper-zkservice.jpg
--------------------------------------------------------------------------------
/pictures/zookeeper.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/zookeeper.jpg
--------------------------------------------------------------------------------
/pictures/大数据处理简化流程.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/大数据处理简化流程.png
--------------------------------------------------------------------------------
/pictures/大数据技术栈思维导图.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/大数据技术栈思维导图.png
--------------------------------------------------------------------------------
/pictures/大数据技术栈思维导图.xmind:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/pictures/大数据技术栈思维导图.xmind
--------------------------------------------------------------------------------
/resources/csv/dept.csv:
--------------------------------------------------------------------------------
1 | 10,ACCOUNTING,NEW YORK
2 | 20,RESEARCH,DALLAS
3 | 30,SALES,CHICAGO
4 | 40,OPERATIONS,BOSTON
5 |
--------------------------------------------------------------------------------
/resources/json/dept.json:
--------------------------------------------------------------------------------
1 | {"DEPTNO": 10,"DNAME": "ACCOUNTING","LOC": "NEW YORK"}
2 | {"DEPTNO": 20,"DNAME": "RESEARCH","LOC": "DALLAS"}
3 | {"DEPTNO": 30,"DNAME": "SALES","LOC": "CHICAGO"}
4 | {"DEPTNO": 40,"DNAME": "OPERATIONS","LOC": "BOSTON"}
--------------------------------------------------------------------------------
/resources/json/emp.json:
--------------------------------------------------------------------------------
1 | {"EMPNO": 7369,"ENAME": "SMITH","JOB": "CLERK","MGR": 7902,"HIREDATE": "1980-12-17 00:00:00","SAL": 800.00,"COMM": null,"DEPTNO": 20}
2 | {"EMPNO": 7499,"ENAME": "ALLEN","JOB": "SALESMAN","MGR": 7698,"HIREDATE": "1981-02-20 00:00:00","SAL": 1600.00,"COMM": 300.00,"DEPTNO": 30}
3 | {"EMPNO": 7521,"ENAME": "WARD","JOB": "SALESMAN","MGR": 7698,"HIREDATE": "1981-02-22 00:00:00","SAL": 1250.00,"COMM": 500.00,"DEPTNO": 30}
4 | {"EMPNO": 7566,"ENAME": "JONES","JOB": "MANAGER","MGR": 7839,"HIREDATE": "1981-04-02 00:00:00","SAL": 2975.00,"COMM": null,"DEPTNO": 20}
5 | {"EMPNO": 7654,"ENAME": "MARTIN","JOB": "SALESMAN","MGR": 7698,"HIREDATE": "1981-09-28 00:00:00","SAL": 1250.00,"COMM": 1400.00,"DEPTNO": 30}
6 | {"EMPNO": 7698,"ENAME": "BLAKE","JOB": "MANAGER","MGR": 7839,"HIREDATE": "1981-05-01 00:00:00","SAL": 2850.00,"COMM": null,"DEPTNO": 30}
7 | {"EMPNO": 7782,"ENAME": "CLARK","JOB": "MANAGER","MGR": 7839,"HIREDATE": "1981-06-09 00:00:00","SAL": 2450.00,"COMM": null,"DEPTNO": 10}
8 | {"EMPNO": 7788,"ENAME": "SCOTT","JOB": "ANALYST","MGR": 7566,"HIREDATE": "1987-04-19 00:00:00","SAL": 1500.00,"COMM": null,"DEPTNO": 20}
9 | {"EMPNO": 7839,"ENAME": "KING","JOB": "PRESIDENT","MGR": null,"HIREDATE": "1981-11-17 00:00:00","SAL": 5000.00,"COMM": null,"DEPTNO": 10}
10 | {"EMPNO": 7844,"ENAME": "TURNER","JOB": "SALESMAN","MGR": 7698,"HIREDATE": "1981-09-08 00:00:00","SAL": 1500.00,"COMM": 0.00,"DEPTNO": 30}
11 | {"EMPNO": 7876,"ENAME": "ADAMS","JOB": "CLERK","MGR": 7788,"HIREDATE": "1987-05-23 00:00:00","SAL": 1100.00,"COMM": null,"DEPTNO": 20}
12 | {"EMPNO": 7900,"ENAME": "JAMES","JOB": "CLERK","MGR": 7698,"HIREDATE": "1981-12-03 00:00:00","SAL": 950.00,"COMM": null,"DEPTNO": 30}
13 | {"EMPNO": 7902,"ENAME": "FORD","JOB": "ANALYST","MGR": 7566,"HIREDATE": "1981-12-03 00:00:00","SAL": 3000.00,"COMM": null,"DEPTNO": 20}
14 | {"EMPNO": 7934,"ENAME": "MILLER","JOB": "CLERK","MGR": 7782,"HIREDATE": "1982-01-23 00:00:00","SAL": 1300.00,"COMM": null,"DEPTNO": 10}
--------------------------------------------------------------------------------
/resources/mysql-connector-java-5.1.47.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/resources/mysql-connector-java-5.1.47.jar
--------------------------------------------------------------------------------
/resources/orc/dept.orc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/resources/orc/dept.orc
--------------------------------------------------------------------------------
/resources/parquet/dept.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/resources/parquet/dept.parquet
--------------------------------------------------------------------------------
/resources/parquet/emp.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/heibaiying/BigData-Notes/3898939aca387c25b3eb4e51ef49dfccca8543ed/resources/parquet/emp.parquet
--------------------------------------------------------------------------------
/resources/tsv/dept.tsv:
--------------------------------------------------------------------------------
1 | 10 ACCOUNTING NEW YORK
2 | 20 RESEARCH DALLAS
3 | 30 SALES CHICAGO
4 | 40 OPERATIONS BOSTON
5 |
--------------------------------------------------------------------------------
/resources/tsv/emp.tsv:
--------------------------------------------------------------------------------
1 | 7369 SMITH CLERK 7902 1980-12-17 00:00:00 800.00 20
2 | 7499 ALLEN SALESMAN 7698 1981-02-20 00:00:00 1600.00 300.00 30
3 | 7521 WARD SALESMAN 7698 1981-02-22 00:00:00 1250.00 500.00 30
4 | 7566 JONES MANAGER 7839 1981-04-02 00:00:00 2975.00 20
5 | 7654 MARTIN SALESMAN 7698 1981-09-28 00:00:00 1250.00 1400.00 30
6 | 7698 BLAKE MANAGER 7839 1981-05-01 00:00:00 2850.00 30
7 | 7782 CLARK MANAGER 7839 1981-06-09 00:00:00 2450.00 10
8 | 7788 SCOTT ANALYST 7566 1987-04-19 00:00:00 1500.00 20
9 | 7839 KING PRESIDENT 1981-11-17 00:00:00 5000.00 10
10 | 7844 TURNER SALESMAN 7698 1981-09-08 00:00:00 1500.00 0.00 30
11 | 7876 ADAMS CLERK 7788 1987-05-23 00:00:00 1100.00 20
12 | 7900 JAMES CLERK 7698 1981-12-03 00:00:00 950.00 30
13 | 7902 FORD ANALYST 7566 1981-12-03 00:00:00 3000.00 20
14 | 7934 MILLER CLERK 7782 1982-01-23 00:00:00 1300.00 10
15 |
--------------------------------------------------------------------------------
/resources/txt/dept.txt:
--------------------------------------------------------------------------------
1 | 10 ACCOUNTING NEW YORK
2 | 20 RESEARCH DALLAS
3 | 30 SALES CHICAGO
4 | 40 OPERATIONS BOSTON
5 |
--------------------------------------------------------------------------------
/resources/txt/emp.txt:
--------------------------------------------------------------------------------
1 | 7369 SMITH CLERK 7902 1980-12-17 00:00:00 800.00 20
2 | 7499 ALLEN SALESMAN 7698 1981-02-20 00:00:00 1600.00 300.00 30
3 | 7521 WARD SALESMAN 7698 1981-02-22 00:00:00 1250.00 500.00 30
4 | 7566 JONES MANAGER 7839 1981-04-02 00:00:00 2975.00 20
5 | 7654 MARTIN SALESMAN 7698 1981-09-28 00:00:00 1250.00 1400.00 30
6 | 7698 BLAKE MANAGER 7839 1981-05-01 00:00:00 2850.00 30
7 | 7782 CLARK MANAGER 7839 1981-06-09 00:00:00 2450.00 10
8 | 7788 SCOTT ANALYST 7566 1987-04-19 00:00:00 1500.00 20
9 | 7839 KING PRESIDENT 1981-11-17 00:00:00 5000.00 10
10 | 7844 TURNER SALESMAN 7698 1981-09-08 00:00:00 1500.00 0.00 30
11 | 7876 ADAMS CLERK 7788 1987-05-23 00:00:00 1100.00 20
12 | 7900 JAMES CLERK 7698 1981-12-03 00:00:00 950.00 30
13 | 7902 FORD ANALYST 7566 1981-12-03 00:00:00 3000.00 20
14 | 7934 MILLER CLERK 7782 1982-01-23 00:00:00 1300.00 10
15 |
--------------------------------------------------------------------------------
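The dept/emp files above are the classic Oracle EMP/DEPT demo dataset, provided here in several formats (CSV, JSON, TSV, plain text, ORC, Parquet) for the repository's Spark examples. As a minimal sketch of how they could be consumed — the relative paths, column names, and local master below are assumptions for illustration, not repository code:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical quick-start: load the sample dept data in three of the
// formats shipped under /resources. Paths assume the working directory
// is the repository root.
object SampleDataQuickStart {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("SampleDataQuickStart")
      .master("local[2]") // assumption: local development run
      .getOrCreate()

    // JSON: one object per line, so the default line-delimited JSON reader applies.
    val deptJson = spark.read.json("resources/json/dept.json")

    // CSV: the file has no header row, so name the columns explicitly.
    val deptCsv = spark.read
      .csv("resources/csv/dept.csv")
      .toDF("DEPTNO", "DNAME", "LOC")

    // TSV: the same CSV reader with a tab separator.
    val deptTsv = spark.read
      .option("sep", "\t")
      .csv("resources/tsv/dept.tsv")
      .toDF("DEPTNO", "DNAME", "LOC")

    deptJson.show()
    deptCsv.show()
    deptTsv.show()
    spark.stop()
  }
}
```

Note that in the TSV and TXT variants a null COMM field is simply absent from the row (e.g. the JONES and KING records), whereas the JSON variant encodes it explicitly as `null`; schema-on-read code should account for the resulting ragged rows.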