├── .idea
├── compiler.xml
├── copyright
│ └── profiles_settings.xml
├── libraries
│ └── SBT__org_scala_lang_scala_library_2_11_8_jar.xml
├── misc.xml
├── modules.xml
├── modules
│ ├── spark_recommend-build.iml
│ └── spark_recommend.iml
├── sbt.xml
├── scala_compiler.xml
├── uiDesigner.xml
├── vcs.xml
└── workspace.xml
├── README.md
├── build.sbt
├── checkpoint
└── 1cc9f243-2b19-4f2c-934a-88ec14e83ba6
│ ├── rdd-121
│ ├── ._partitioner.crc
│ ├── .part-00000.crc
│ ├── .part-00001.crc
│ ├── _partitioner
│ ├── part-00000
│ └── part-00001
│ ├── rdd-181
│ ├── .part-00000.crc
│ ├── .part-00001.crc
│ ├── part-00000
│ └── part-00001
│ └── rdd-61
│ ├── ._partitioner.crc
│ ├── .part-00000.crc
│ ├── .part-00001.crc
│ ├── _partitioner
│ ├── part-00000
│ └── part-00001
├── project
├── build.properties
├── plugins.sbt
├── project
│ └── target
│ │ └── config-classes
│ │ ├── $9f0a30d05c6bc0ace695$$anonfun$$sbtdef$1.class
│ │ ├── $9f0a30d05c6bc0ace695$.class
│ │ ├── $9f0a30d05c6bc0ace695.cache
│ │ └── $9f0a30d05c6bc0ace695.class
└── target
│ ├── config-classes
│ ├── $16d5d8d1446d5290a2c6$$anonfun$$sbtdef$1.class
│ ├── $16d5d8d1446d5290a2c6$.class
│ ├── $16d5d8d1446d5290a2c6.cache
│ ├── $16d5d8d1446d5290a2c6.class
│ ├── $422f0cca637471d8282b$$anonfun$$sbtdef$1.class
│ ├── $422f0cca637471d8282b$.class
│ ├── $422f0cca637471d8282b.cache
│ ├── $422f0cca637471d8282b.class
│ ├── $53fa29f8c78371e1ae05$$anonfun$$sbtdef$1.class
│ ├── $53fa29f8c78371e1ae05$.class
│ ├── $53fa29f8c78371e1ae05.cache
│ ├── $53fa29f8c78371e1ae05.class
│ ├── $6ab7769ba7b5d43838ab$$anonfun$$sbtdef$1.class
│ ├── $6ab7769ba7b5d43838ab$.class
│ ├── $6ab7769ba7b5d43838ab.cache
│ ├── $6ab7769ba7b5d43838ab.class
│ ├── $73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1$$anonfun$apply$1.class
│ ├── $73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1.class
│ ├── $73ba6e6e6fcb8ddc8674$.class
│ ├── $73ba6e6e6fcb8ddc8674.cache
│ ├── $73ba6e6e6fcb8ddc8674.class
│ ├── $9148c6b7792a232e3c20$$anonfun$$sbtdef$1.class
│ ├── $9148c6b7792a232e3c20$.class
│ ├── $9148c6b7792a232e3c20.cache
│ └── $9148c6b7792a232e3c20.class
│ ├── resolution-cache
│ ├── default
│ │ └── spark_recommend-build
│ │ │ └── scala_2.10
│ │ │ └── sbt_0.13
│ │ │ └── 0.1-SNAPSHOT
│ │ │ ├── resolved.xml.properties
│ │ │ └── resolved.xml.xml
│ └── reports
│ │ ├── default-spark_recommend-build-compile-internal.xml
│ │ ├── default-spark_recommend-build-compile.xml
│ │ ├── default-spark_recommend-build-docs.xml
│ │ ├── default-spark_recommend-build-optional.xml
│ │ ├── default-spark_recommend-build-plugin.xml
│ │ ├── default-spark_recommend-build-pom.xml
│ │ ├── default-spark_recommend-build-provided.xml
│ │ ├── default-spark_recommend-build-runtime-internal.xml
│ │ ├── default-spark_recommend-build-runtime.xml
│ │ ├── default-spark_recommend-build-scala-tool.xml
│ │ ├── default-spark_recommend-build-sources.xml
│ │ ├── default-spark_recommend-build-test-internal.xml
│ │ ├── default-spark_recommend-build-test.xml
│ │ ├── ivy-report.css
│ │ └── ivy-report.xsl
│ └── streams
│ ├── $global
│ ├── $global
│ │ └── $global
│ │ │ └── streams
│ │ │ └── out
│ ├── dependencyPositions
│ │ └── $global
│ │ │ └── streams
│ │ │ └── update_cache_2.10
│ │ │ ├── input_dsp
│ │ │ └── output_dsp
│ ├── ivyConfiguration
│ │ └── $global
│ │ │ └── streams
│ │ │ └── out
│ ├── ivySbt
│ │ └── $global
│ │ │ └── streams
│ │ │ └── out
│ ├── projectDescriptors
│ │ └── $global
│ │ │ └── streams
│ │ │ └── out
│ └── update
│ │ └── $global
│ │ └── streams
│ │ ├── out
│ │ └── update_cache_2.10
│ │ ├── inputs
│ │ └── output
│ ├── compile
│ ├── $global
│ │ └── $global
│ │ │ └── discoveredMainClasses
│ │ │ └── data
│ ├── compile
│ │ └── $global
│ │ │ └── streams
│ │ │ └── out
│ ├── compileIncremental
│ │ └── $global
│ │ │ └── streams
│ │ │ ├── export
│ │ │ └── out
│ ├── copyResources
│ │ └── $global
│ │ │ └── streams
│ │ │ ├── copy-resources
│ │ │ └── out
│ ├── dependencyClasspath
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ ├── exportedProducts
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ ├── externalDependencyClasspath
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ ├── internalDependencyClasspath
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ ├── managedClasspath
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ ├── unmanagedClasspath
│ │ └── $global
│ │ │ └── streams
│ │ │ └── export
│ └── unmanagedJars
│ │ └── $global
│ │ └── streams
│ │ └── export
│ └── runtime
│ ├── dependencyClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── exportedProducts
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── externalDependencyClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── fullClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── internalDependencyClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── managedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ ├── unmanagedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
│ └── unmanagedJars
│ └── $global
│ └── streams
│ └── export
├── src
└── main
│ ├── java
│ ├── hbase
│ │ ├── Hbase_CURD.java
│ │ └── readme.txt
│ └── kafka
│ │ ├── KafkaConsumer.java
│ │ └── KafkaProducer.java
│ ├── resources
│ └── hbase-site.xml
│ └── scala
│ ├── hbase
│ ├── Hbase_CRUD.scala
│ └── Spark_RDD.scala
│ ├── matrix
│ └── TRowMatrix.scala
│ ├── spark_streaming
│ ├── Demo.scala
│ ├── Kafka_similar.scala
│ └── Kafka_wc.scala
│ └── tags
│ ├── HbaseMatrix.scala
│ └── localMatrix.scala
└── target
├── .history
├── resolution-cache
├── default
│ ├── spark_recommend$sbt_2.11
│ │ └── 1.0
│ │ │ ├── resolved.xml.properties
│ │ │ └── resolved.xml.xml
│ ├── spark_recommend$sources_2.11
│ │ └── 1.0
│ │ │ ├── resolved.xml.properties
│ │ │ └── resolved.xml.xml
│ └── spark_recommend_2.11
│ │ └── 1.0
│ │ ├── resolved.xml.properties
│ │ └── resolved.xml.xml
└── reports
│ ├── default-spark_recommend$sbt_2.11-default.xml
│ ├── default-spark_recommend$sources_2.11-compile-internal.xml
│ ├── default-spark_recommend$sources_2.11-compile.xml
│ ├── default-spark_recommend$sources_2.11-docs.xml
│ ├── default-spark_recommend$sources_2.11-optional.xml
│ ├── default-spark_recommend$sources_2.11-plugin.xml
│ ├── default-spark_recommend$sources_2.11-pom.xml
│ ├── default-spark_recommend$sources_2.11-provided.xml
│ ├── default-spark_recommend$sources_2.11-runtime-internal.xml
│ ├── default-spark_recommend$sources_2.11-runtime.xml
│ ├── default-spark_recommend$sources_2.11-scala-tool.xml
│ ├── default-spark_recommend$sources_2.11-sources.xml
│ ├── default-spark_recommend$sources_2.11-test-internal.xml
│ ├── default-spark_recommend$sources_2.11-test.xml
│ ├── default-spark_recommend_2.11-compile-internal.xml
│ ├── default-spark_recommend_2.11-compile.xml
│ ├── default-spark_recommend_2.11-docs.xml
│ ├── default-spark_recommend_2.11-optional.xml
│ ├── default-spark_recommend_2.11-plugin.xml
│ ├── default-spark_recommend_2.11-pom.xml
│ ├── default-spark_recommend_2.11-provided.xml
│ ├── default-spark_recommend_2.11-runtime-internal.xml
│ ├── default-spark_recommend_2.11-runtime.xml
│ ├── default-spark_recommend_2.11-scala-tool.xml
│ ├── default-spark_recommend_2.11-sources.xml
│ ├── default-spark_recommend_2.11-test-internal.xml
│ ├── default-spark_recommend_2.11-test.xml
│ ├── ivy-report.css
│ └── ivy-report.xsl
├── scala-2.11
└── classes
│ ├── hbase-site.xml
│ ├── hbase
│ ├── Hbase_CRUD$$anonfun$1.class
│ ├── Hbase_CRUD$.class
│ ├── Hbase_CRUD$delayedInit$body.class
│ ├── Hbase_CRUD.class
│ ├── Hbase_CURD.class
│ ├── Spark_RDD$$anonfun$1.class
│ ├── Spark_RDD$$anonfun$2.class
│ ├── Spark_RDD$$anonfun$3.class
│ ├── Spark_RDD$$anonfun$4.class
│ ├── Spark_RDD$.class
│ ├── Spark_RDD$delayedInit$body.class
│ ├── Spark_RDD.class
│ └── readme.txt
│ ├── kafka
│ ├── KafkaConsumer.class
│ └── KafkaProducer.class
│ ├── matrix
│ ├── TRowMatrix$$anonfun$1.class
│ ├── TRowMatrix$$anonfun$2.class
│ ├── TRowMatrix$$anonfun$3.class
│ ├── TRowMatrix$$anonfun$4.class
│ ├── TRowMatrix$$anonfun$matrix$TRowMatrix$$buildRow$1.class
│ ├── TRowMatrix$$anonfun$matrix$TRowMatrix$$rowToTransposedTriplet$1.class
│ ├── TRowMatrix$.class
│ └── TRowMatrix.class
│ ├── spark_streaming
│ ├── Demo$$anonfun$1.class
│ ├── Demo$$anonfun$2.class
│ ├── Demo$$anonfun$3.class
│ ├── Demo$.class
│ ├── Demo$delayedInit$body.class
│ ├── Demo.class
│ ├── Kafka_similar$.class
│ ├── Kafka_similar$delayedInit$body.class
│ ├── Kafka_similar.class
│ ├── Kafka_wc$$anonfun$1.class
│ ├── Kafka_wc$$anonfun$2.class
│ ├── Kafka_wc$$anonfun$3.class
│ ├── Kafka_wc$$anonfun$4.class
│ ├── Kafka_wc$$anonfun$5.class
│ ├── Kafka_wc$$anonfun$6.class
│ ├── Kafka_wc$.class
│ ├── Kafka_wc$delayedInit$body.class
│ └── Kafka_wc.class
│ └── tags
│ ├── HbaseMatrix$$anonfun$1.class
│ ├── HbaseMatrix$$anonfun$10.class
│ ├── HbaseMatrix$$anonfun$11.class
│ ├── HbaseMatrix$$anonfun$2.class
│ ├── HbaseMatrix$$anonfun$3.class
│ ├── HbaseMatrix$$anonfun$4.class
│ ├── HbaseMatrix$$anonfun$5.class
│ ├── HbaseMatrix$$anonfun$6.class
│ ├── HbaseMatrix$$anonfun$7.class
│ ├── HbaseMatrix$$anonfun$8.class
│ ├── HbaseMatrix$$anonfun$9.class
│ ├── HbaseMatrix$$anonfun$tag_vector$1.class
│ ├── HbaseMatrix$.class
│ ├── HbaseMatrix$delayedInit$body.class
│ ├── HbaseMatrix.class
│ ├── localMatrix$$anonfun$1.class
│ ├── localMatrix$$anonfun$2$$anonfun$apply$mcVI$sp$1.class
│ ├── localMatrix$$anonfun$2.class
│ ├── localMatrix$$anonfun$3.class
│ ├── localMatrix$$anonfun$4.class
│ ├── localMatrix$$anonfun$5.class
│ ├── localMatrix$$anonfun$6.class
│ ├── localMatrix$$anonfun$7.class
│ ├── localMatrix$.class
│ ├── localMatrix$delayedInit$body.class
│ └── localMatrix.class
└── streams
├── $global
├── $global
│ └── dumpStructure
│ │ └── $global
│ │ └── streams
│ │ └── out
├── dependencyPositions
│ └── $global
│ │ └── streams
│ │ └── update_cache_2.11
│ │ ├── input_dsp
│ │ └── output_dsp
├── ivyConfiguration
│ └── $global
│ │ └── streams
│ │ └── out
├── ivySbt
│ └── $global
│ │ └── streams
│ │ └── out
├── projectDescriptors
│ └── $global
│ │ └── streams
│ │ └── out
├── update
│ └── $global
│ │ └── streams
│ │ ├── out
│ │ └── update_cache_2.11
│ │ ├── inputs
│ │ └── output
├── updateClassifiers
│ └── $global
│ │ └── streams
│ │ └── out
└── updateSbtClassifiers
│ └── $global
│ └── streams
│ └── out
├── compile
├── externalDependencyClasspath
│ └── $global
│ │ └── streams
│ │ └── export
├── managedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
├── unmanagedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
└── unmanagedJars
│ └── $global
│ └── streams
│ └── export
├── runtime
├── externalDependencyClasspath
│ └── $global
│ │ └── streams
│ │ └── export
├── managedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
├── unmanagedClasspath
│ └── $global
│ │ └── streams
│ │ └── export
└── unmanagedJars
│ └── $global
│ └── streams
│ └── export
└── test
├── externalDependencyClasspath
└── $global
│ └── streams
│ └── export
├── managedClasspath
└── $global
│ └── streams
│ └── export
├── unmanagedClasspath
└── $global
│ └── streams
│ └── export
└── unmanagedJars
└── $global
└── streams
└── export
/.idea/compiler.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules/spark_recommend-build.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules/spark_recommend.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/sbt.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/scala_compiler.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/uiDesigner.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # spark_recommend
2 | * Java part
3 | ..src/main/java/hbase //HBase create/read/update/delete examples, later to be used for tag extraction and storage
4 |
5 | ..src/main/java/kafka //Kafka producer and consumer, used to feed spark_streaming
6 |
7 | * Scala part
8 |
9 | ..src/main/scala/hbase //HBase CRUD operations and RDD operations
10 |
11 | ..src/main/scala/matrix //matrix-processing directory
12 |
13 | ..src/main/scala/spark_streaming //Spark Streaming real-time processing part
14 |
15 | ..src/main/scala/tags //tag-targeting module
16 |
17 | ..src/main/scala/tags/localMatrix.scala //use of local vectors and matrices
18 |
19 | ..src/main/scala/tags/HbaseMatrix.scala //simulated ad-targeting module (an application of distributed matrices)
20 |
--------------------------------------------------------------------------------
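Note: the README and the compiled classes under target/scala-2.11/spark_streaming (Kafka_wc, Kafka_similar, Demo) point to a Kafka-fed Spark Streaming word-count job, but the Scala sources themselves are not captured in this dump. Below is a minimal sketch of such a job, assuming the Spark 1.x receiver-based spark-streaming-kafka API; the ZooKeeper address, consumer group, and topic name are placeholders, not values taken from the repository.

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils

object KafkaWordCountSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("kafka_wc_sketch").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))
    // The repository's checkpoint/ directory holds exactly this kind of RDD checkpoint data.
    ssc.checkpoint("checkpoint")

    // Receiver-based Kafka stream; ZooKeeper quorum, consumer group, and
    // topic -> receiver-thread-count map are placeholder values.
    val lines = KafkaUtils.createStream(ssc, "localhost:2181", "wc-group", Map("test" -> 1)).map(_._2)

    // Classic word count over each micro-batch.
    val counts = lines.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
    counts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}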
/build.sbt:
--------------------------------------------------------------------------------
1 | name := "spark_recommend"
2 |
3 | version := "1.0"
4 |
5 | scalaVersion := "2.11.8"
6 |
--------------------------------------------------------------------------------
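Note: build.sbt as captured sets only the project name, version, and Scala version, yet the compiled classes reference Spark (core, MLlib, Streaming), Kafka, and HBase. A hedged sketch of the kind of libraryDependencies block such a build would need is shown below; the artifact versions are assumptions chosen to match the Scala 2.11 / sbt 0.13 era of this project, not values recorded in the repository.

// Hypothetical additions to build.sbt; versions are assumptions, not taken from the repo.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core"            % "1.6.1",
  "org.apache.spark" %% "spark-mllib"           % "1.6.1",
  "org.apache.spark" %% "spark-streaming"       % "1.6.1",
  "org.apache.spark" %% "spark-streaming-kafka" % "1.6.1",
  "org.apache.hbase"  % "hbase-client"          % "1.2.1"
)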
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/._partitioner.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/._partitioner.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/.part-00000.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/.part-00000.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/.part-00001.crc:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/_partitioner:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/_partitioner
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/part-00000:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/part-00000
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/part-00001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-121/part-00001
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/.part-00000.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/.part-00000.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/.part-00001.crc:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/part-00000:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/part-00000
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/part-00001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-181/part-00001
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/._partitioner.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/._partitioner.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/.part-00000.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/.part-00000.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/.part-00001.crc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/.part-00001.crc
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/_partitioner:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/_partitioner
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/part-00000:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/part-00000
--------------------------------------------------------------------------------
/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/part-00001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/checkpoint/1cc9f243-2b19-4f2c-934a-88ec14e83ba6/rdd-61/part-00001
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 0.13.8
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | logLevel := Level.Warn
--------------------------------------------------------------------------------
/project/project/target/config-classes/$9f0a30d05c6bc0ace695$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/project/target/config-classes/$9f0a30d05c6bc0ace695$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/project/target/config-classes/$9f0a30d05c6bc0ace695$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/project/target/config-classes/$9f0a30d05c6bc0ace695$.class
--------------------------------------------------------------------------------
/project/project/target/config-classes/$9f0a30d05c6bc0ace695.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/project/target/config-classes/$9f0a30d05c6bc0ace695.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/project/target/config-classes/$9f0a30d05c6bc0ace695.class
--------------------------------------------------------------------------------
/project/target/config-classes/$16d5d8d1446d5290a2c6$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$16d5d8d1446d5290a2c6$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$16d5d8d1446d5290a2c6$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$16d5d8d1446d5290a2c6$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$16d5d8d1446d5290a2c6.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$16d5d8d1446d5290a2c6.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$16d5d8d1446d5290a2c6.class
--------------------------------------------------------------------------------
/project/target/config-classes/$422f0cca637471d8282b$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$422f0cca637471d8282b$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$422f0cca637471d8282b$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$422f0cca637471d8282b$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$422f0cca637471d8282b.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$422f0cca637471d8282b.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$422f0cca637471d8282b.class
--------------------------------------------------------------------------------
/project/target/config-classes/$53fa29f8c78371e1ae05$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$53fa29f8c78371e1ae05$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$53fa29f8c78371e1ae05$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$53fa29f8c78371e1ae05$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$53fa29f8c78371e1ae05.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$53fa29f8c78371e1ae05.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$53fa29f8c78371e1ae05.class
--------------------------------------------------------------------------------
/project/target/config-classes/$6ab7769ba7b5d43838ab$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$6ab7769ba7b5d43838ab$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$6ab7769ba7b5d43838ab$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$6ab7769ba7b5d43838ab$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$6ab7769ba7b5d43838ab.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$6ab7769ba7b5d43838ab.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$6ab7769ba7b5d43838ab.class
--------------------------------------------------------------------------------
/project/target/config-classes/$73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1$$anonfun$apply$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1$$anonfun$apply$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$73ba6e6e6fcb8ddc8674$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$73ba6e6e6fcb8ddc8674$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$73ba6e6e6fcb8ddc8674$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$73ba6e6e6fcb8ddc8674.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$73ba6e6e6fcb8ddc8674.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$73ba6e6e6fcb8ddc8674.class
--------------------------------------------------------------------------------
/project/target/config-classes/$9148c6b7792a232e3c20$$anonfun$$sbtdef$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$9148c6b7792a232e3c20$$anonfun$$sbtdef$1.class
--------------------------------------------------------------------------------
/project/target/config-classes/$9148c6b7792a232e3c20$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$9148c6b7792a232e3c20$.class
--------------------------------------------------------------------------------
/project/target/config-classes/$9148c6b7792a232e3c20.cache:
--------------------------------------------------------------------------------
1 | sbt.internals.DslEntry
--------------------------------------------------------------------------------
/project/target/config-classes/$9148c6b7792a232e3c20.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/config-classes/$9148c6b7792a232e3c20.class
--------------------------------------------------------------------------------
/project/target/resolution-cache/default/spark_recommend-build/scala_2.10/sbt_0.13/0.1-SNAPSHOT/resolved.xml.properties:
--------------------------------------------------------------------------------
1 | #default#spark_recommend-build;0.1-SNAPSHOT resolved revisions
2 | #Tue Jun 28 01:36:41 PDT 2016
3 | +revision\:\#@\#\:+2.10.4\:\#@\#\:+module\:\#@\#\:+scala-library\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.10.4 ? 2.10.4 null
4 | +revision\:\#@\#\:+2.10.4\:\#@\#\:+module\:\#@\#\:+scala-compiler\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.10.4 release 2.10.4 null
5 | +revision\:\#@\#\:+0.13.8\:\#@\#\:+module\:\#@\#\:+sbt\:\#@\#\:+organisation\:\#@\#\:+org.scala-sbt\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=0.13.8 release 0.13.8 null
6 |
--------------------------------------------------------------------------------
/project/target/resolution-cache/default/spark_recommend-build/scala_2.10/sbt_0.13/0.1-SNAPSHOT/resolved.xml.xml:
--------------------------------------------------------------------------------
11 | spark_recommend-build
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-compile.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-docs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-optional.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-plugin.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-pom.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-runtime-internal.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-runtime.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-scala-tool.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-sources.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/default-spark_recommend-build-test.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/project/target/resolution-cache/reports/ivy-report.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | *
17 | */
18 |
19 | body {
20 | font-family:"Trebuchet MS",Verdana,Geneva,Arial,Helvetica,sans-serif;
21 | font-size:small;
22 | }
23 |
24 | div#logo {
25 | float: right;
26 | padding-left: 10px;
27 | padding-bottom: 10px;
28 | background: white;
29 | text-align: center;
30 | }
31 |
32 | #logo img {
33 | border: 0;
34 | }
35 |
36 | div#date {
37 | font-style: italic;
38 | padding-left: 60px;
39 | padding-bottom: 40px;
40 | }
41 |
42 |
43 | h1 {
44 | margin-bottom:2px;
45 |
46 | border-color:#7A9437;
47 | border-style:solid;
48 | border-width:0 0 3px 0;
49 | }
50 |
51 | span#module {
52 | color:#7A9437;
53 | text-decoration:none;
54 | }
55 |
56 | span#organisation {
57 | color:black;
58 | text-decoration:none;
59 | }
60 |
61 | #confmenu {
62 | color: #000;
63 | border-bottom: 2px solid black;
64 | margin: 12px 0px 0px 0px;
65 | padding: 0px;
66 | z-index: 1;
67 | padding-left: 10px
68 | }
69 |
70 | #confmenu li {
71 | display: inline;
72 | overflow: hidden;
73 | list-style-type: none;
74 | }
75 |
76 | #confmenu a, a.active {
77 | color: #DEDECF;
78 | background: #898B5E;
79 | font: bold 1em "Trebuchet MS", Arial, sans-serif;
80 | border: 2px solid black;
81 | padding: 2px 5px 0px 5px;
82 | text-decoration: none;
83 | }
84 |
85 | /*
86 | background: #ABAD85 #CED4BD
87 | background: #DEE4CD
88 | */
89 |
90 | #confmenu a.active {
91 | color: #7A9437;
92 | background: #DEE4CD;
93 | border-bottom: 3px solid #DEE4CD;
94 | }
95 |
96 | #confmenu a:hover {
97 | color: #fff;
98 | background: #ADC09F;
99 | }
100 |
101 | #confmenu a:visited {
102 | color: #DEDECF;
103 | }
104 |
105 | #confmenu a.active:visited {
106 | color: #7A9437;
107 | }
108 |
109 | #confmenu a.active:hover {
110 | background: #DEE4CD;
111 | color: #DEDECF;
112 | }
113 |
114 | #content {
115 | background: #DEE4CD;
116 | padding: 20px;
117 | border: 2px solid black;
118 | border-top: none;
119 | z-index: 2;
120 | }
121 |
122 | #content a {
123 | text-decoration: none;
124 | color: #E8E9BE;
125 | }
126 |
127 | #content a:hover {
128 | background: #898B5E;
129 | }
130 |
131 |
132 | h2 {
133 | margin-bottom:2px;
134 | font-size:medium;
135 |
136 | border-color:#7A9437;
137 | border-style:solid;
138 | border-width:0 0 2px 0;
139 | }
140 |
141 | h3 {
142 | margin-top:30px;
143 | margin-bottom:2px;
144 | padding: 5 5 5 0;
145 | font-size: 24px;
146 | border-style:solid;
147 | border-width:0 0 2px 0;
148 | }
149 |
150 | h4 {
151 | margin-bottom:2px;
152 | margin-top:2px;
153 | font-size:medium;
154 |
155 | border-color:#7A9437;
156 | border-style:dashed;
157 | border-width:0 0 1px 0;
158 | }
159 |
160 | h5 {
161 | margin-bottom:2px;
162 | margin-top:2px;
163 | margin-left:20px;
164 | font-size:medium;
165 | }
166 |
167 | span.resolved {
168 | padding-left: 15px;
169 | font-weight: 500;
170 | font-size: small;
171 | }
172 |
173 |
174 | #content table {
175 | border-collapse:collapse;
176 | width:90%;
177 | margin:auto;
178 | margin-top: 5px;
179 | }
180 | #content thead {
181 | background-color:#CED4BD;
182 | border:1px solid #7A9437;
183 | }
184 | #content tbody {
185 | border-collapse:collapse;
186 | background-color:#FFFFFF;
187 | border:1px solid #7A9437;
188 | }
189 |
190 | #content th {
191 | font-family:monospace;
192 | border:1px solid #7A9437;
193 | padding:5px;
194 | }
195 |
196 | #content td {
197 | border:1px dotted #7A9437;
198 | padding:0 3 0 3;
199 | }
200 |
201 | #content table a {
202 | color:#7A9437;
203 | text-decoration:none;
204 | }
205 |
206 | #content table a:hover {
207 | background-color:#CED4BD;
208 | color:#7A9437;
209 | }
210 |
211 |
212 |
213 | table.deps {
214 | border-collapse:collapse;
215 | width:90%;
216 | margin:auto;
217 | margin-top: 5px;
218 | }
219 |
220 | table.deps thead {
221 | background-color:#CED4BD;
222 | border:1px solid #7A9437;
223 | }
224 | table.deps tbody {
225 | border-collapse:collapse;
226 | background-color:#FFFFFF;
227 | border:1px solid #7A9437;
228 | }
229 |
230 | table.deps th {
231 | font-family:monospace;
232 | border:1px solid #7A9437;
233 | padding:2;
234 | }
235 |
236 | table.deps td {
237 | border:1px dotted #7A9437;
238 | padding:0 3 0 3;
239 | }
240 |
241 |
242 |
243 |
244 |
245 | table.header {
246 | border:0;
247 | width:90%;
248 | margin:auto;
249 | margin-top: 5px;
250 | }
251 |
252 | table.header thead {
253 | border:0;
254 | }
255 | table.header tbody {
256 | border:0;
257 | }
258 | table.header tr {
259 | padding:0px;
260 | border:0;
261 | }
262 | table.header td {
263 | padding:0 3 0 3;
264 | border:0;
265 | }
266 |
267 | td.title {
268 | width:150px;
269 | margin-right:15px;
270 |
271 | font-size:small;
272 | font-weight:700;
273 | }
274 |
275 | td.title:first-letter {
276 | color:#7A9437;
277 | background-color:transparent;
278 | }
279 |
280 |
--------------------------------------------------------------------------------
/project/target/streams/$global/$global/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/$global/$global/streams/out
--------------------------------------------------------------------------------
/project/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.10/input_dsp:
--------------------------------------------------------------------------------
1 | org.scala-lang
scala-library 2.10.4 provided
--------------------------------------------------------------------------------
/project/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.10/output_dsp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.10/output_dsp
--------------------------------------------------------------------------------
/project/target/streams/$global/ivyConfiguration/$global/streams/out:
--------------------------------------------------------------------------------
1 | [debug] Other repositories:
2 | [debug] Default repositories:
3 | [debug] Using inline dependencies specified in Scala.
4 |
--------------------------------------------------------------------------------
/project/target/streams/$global/ivySbt/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/ivySbt/$global/streams/out
--------------------------------------------------------------------------------
/project/target/streams/$global/projectDescriptors/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/projectDescriptors/$global/streams/out
--------------------------------------------------------------------------------
/project/target/streams/$global/update/$global/streams/update_cache_2.10/inputs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/update/$global/streams/update_cache_2.10/inputs
--------------------------------------------------------------------------------
/project/target/streams/$global/update/$global/streams/update_cache_2.10/output:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/$global/update/$global/streams/update_cache_2.10/output
--------------------------------------------------------------------------------
/project/target/streams/compile/$global/$global/discoveredMainClasses/data:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/project/target/streams/compile/compile/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/compile/compile/$global/streams/out
--------------------------------------------------------------------------------
/project/target/streams/compile/compileIncremental/$global/streams/export:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/compile/compileIncremental/$global/streams/export
--------------------------------------------------------------------------------
/project/target/streams/compile/compileIncremental/$global/streams/out:
--------------------------------------------------------------------------------
1 | [debug]
2 | [debug] Initial source changes:
3 | [debug] removed:Set()
4 | [debug] added: Set()
5 | [debug] modified: Set()
6 | [debug] Removed products: Set()
7 | [debug] External API changes: API Changes: Set()
8 | [debug] Modified binary dependencies: Set()
9 | [debug] Initial directly invalidated sources: Set()
10 | [debug]
11 | [debug] Sources indirectly invalidated by:
12 | [debug] product: Set()
13 | [debug] binary dep: Set()
14 | [debug] external source: Set()
15 | [debug] All initially invalidated sources: Set()
16 |
--------------------------------------------------------------------------------
/project/target/streams/compile/copyResources/$global/streams/copy-resources:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/project/target/streams/compile/copyResources/$global/streams/copy-resources
--------------------------------------------------------------------------------
/project/target/streams/compile/copyResources/$global/streams/out:
--------------------------------------------------------------------------------
1 | [debug] Copy resource mappings:
2 | [debug]
3 |
--------------------------------------------------------------------------------
/project/target/streams/compile/dependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.sbt/boot/scala-2.10.4/lib/scala-library.jar:/home/lzz/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.8.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-compiler.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-reflect.jar:/home/lzz/.ivy2/cache/org.scala-sbt/launcher-interface/jars/launcher-interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.8.jar:/home/lzz/.ivy2/cache/jline/jline/jars/jline-2.11.jar:/home/lzz/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-tools.sbinary/sbinary_2.10/jars/sbinary_2.10-0.4.2.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-fccfbd44c9f64523b61398a0155784dcbaeae28f.jar:/home/lzz/.ivy2/cache/com.jcraft/jsch/jars/jsch-0.1.46.jar:/home/lzz/.ivy2/cache/org.scala-sbt/serialization_2.10/jars/serialization_2.10-0.1.1.jar:/home/lzz/.ivy2/cache/org.scala-lang.modules/scala-pickling_2.10/jars/scala-pickling_2.10-0.10.0.jar:/home/lzz/.ivy2/cache/org.scalamacros/quasiquotes_2.10/jars/quasiquotes_2.10-2.0.1.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-core_2.10/jars/json4s-core_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-ast_2.10/jars/json4s-ast_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar:/home/lzz/.ivy2/cache/org.spire-math/jawn-parser_2.10/jars/jawn-parser_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.spire-math/json4s-support_2.10/jars/json4s-support_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.8.jar:/home/lzz
/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-src-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_8_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_3/jars/compiler-interface-bin-0.13.8.jar
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/exportedProducts/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/work/idea_work/spark_work/spark_recommend/project/target/scala-2.10/sbt-0.13/classes
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/externalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.sbt/boot/scala-2.10.4/lib/scala-library.jar:/home/lzz/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.8.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-compiler.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-reflect.jar:/home/lzz/.ivy2/cache/org.scala-sbt/launcher-interface/jars/launcher-interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.8.jar:/home/lzz/.ivy2/cache/jline/jline/jars/jline-2.11.jar:/home/lzz/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-tools.sbinary/sbinary_2.10/jars/sbinary_2.10-0.4.2.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-fccfbd44c9f64523b61398a0155784dcbaeae28f.jar:/home/lzz/.ivy2/cache/com.jcraft/jsch/jars/jsch-0.1.46.jar:/home/lzz/.ivy2/cache/org.scala-sbt/serialization_2.10/jars/serialization_2.10-0.1.1.jar:/home/lzz/.ivy2/cache/org.scala-lang.modules/scala-pickling_2.10/jars/scala-pickling_2.10-0.10.0.jar:/home/lzz/.ivy2/cache/org.scalamacros/quasiquotes_2.10/jars/quasiquotes_2.10-2.0.1.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-core_2.10/jars/json4s-core_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-ast_2.10/jars/json4s-ast_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar:/home/lzz/.ivy2/cache/org.spire-math/jawn-parser_2.10/jars/jawn-parser_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.spire-math/json4s-support_2.10/jars/json4s-support_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.8.jar:/home/lzz
/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-src-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_8_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_3/jars/compiler-interface-bin-0.13.8.jar
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/internalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/managedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.sbt/boot/scala-2.10.4/lib/scala-library.jar:/home/lzz/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.8.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-compiler.jar:/home/lzz/.sbt/boot/scala-2.10.4/lib/scala-reflect.jar:/home/lzz/.ivy2/cache/org.scala-sbt/launcher-interface/jars/launcher-interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.8.jar:/home/lzz/.ivy2/cache/jline/jline/jars/jline-2.11.jar:/home/lzz/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-tools.sbinary/sbinary_2.10/jars/sbinary_2.10-0.4.2.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-fccfbd44c9f64523b61398a0155784dcbaeae28f.jar:/home/lzz/.ivy2/cache/com.jcraft/jsch/jars/jsch-0.1.46.jar:/home/lzz/.ivy2/cache/org.scala-sbt/serialization_2.10/jars/serialization_2.10-0.1.1.jar:/home/lzz/.ivy2/cache/org.scala-lang.modules/scala-pickling_2.10/jars/scala-pickling_2.10-0.10.0.jar:/home/lzz/.ivy2/cache/org.scalamacros/quasiquotes_2.10/jars/quasiquotes_2.10-2.0.1.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-core_2.10/jars/json4s-core_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/org.json4s/json4s-ast_2.10/jars/json4s-ast_2.10-3.2.10.jar:/home/lzz/.ivy2/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar:/home/lzz/.ivy2/cache/org.spire-math/jawn-parser_2.10/jars/jawn-parser_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.spire-math/json4s-support_2.10/jars/json4s-support_2.10-0.6.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar:/home/lzz/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.8.jar:/home/lzz
/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-src-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_8_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_2/jars/compiler-interface-bin-0.13.8.jar:/home/lzz/.ivy2/cache/org.scala-sbt/precompiled-2_9_3/jars/compiler-interface-bin-0.13.8.jar
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/unmanagedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/compile/unmanagedJars/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/dependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/work/idea_work/spark_work/spark_recommend/project/target/scala-2.10/sbt-0.13/classes
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/exportedProducts/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/work/idea_work/spark_work/spark_recommend/project/target/scala-2.10/sbt-0.13/classes
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/externalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/fullClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/work/idea_work/spark_work/spark_recommend/project/target/scala-2.10/sbt-0.13/classes
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/internalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/work/idea_work/spark_work/spark_recommend/project/target/scala-2.10/sbt-0.13/classes
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/managedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/unmanagedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/project/target/streams/runtime/unmanagedJars/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/src/main/java/hbase/Hbase_CURD.java:
--------------------------------------------------------------------------------
1 | package hbase;
2 |
3 | import java.io.IOException;
4 |
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.hbase.HBaseConfiguration;
7 | import org.apache.hadoop.hbase.TableName;
8 | import org.apache.hadoop.hbase.client.Connection;
9 | import org.apache.hadoop.hbase.client.ConnectionFactory;
10 | import org.apache.hadoop.hbase.client.Get;
11 | import org.apache.hadoop.hbase.client.Table;
12 | import org.apache.hadoop.hbase.client.Put;
13 | import org.apache.hadoop.hbase.client.Result;
14 | import org.apache.hadoop.hbase.client.ResultScanner;
15 | import org.apache.hadoop.hbase.client.Scan;
16 | import org.apache.hadoop.hbase.util.Bytes;
17 |
18 | /**
19 | * Created by lzz on 6/28/16.
20 | * Related examples: http://hbase.apache.org/apidocs/index.html
21 | */
22 | public class Hbase_CURD {
23 | public static void main(String[] args) throws IOException {
24 | // You need a configuration object to tell the client where to connect.
25 | // When you create a HBaseConfiguration, it reads in whatever you've set
26 | // into your hbase-site.xml and in hbase-default.xml, as long as these can
27 | // be found on the CLASSPATH
28 | Configuration config = HBaseConfiguration.create();
29 | config.set("hbase.master", "hadoop006:16010" );
30 | config.addResource( "main/resources/hbase-site.xml" );
31 | // Next you need a Connection to the cluster. Create one. When done with it,
32 | // close it. A try/finally is a good way to ensure it gets closed or use
33 | // the jdk7 idiom, try-with-resources: see
34 | // https://docs.oracle.com/javase/tutorial/essential/exceptions/tryResourceClose.html
35 | //
36 | // Connections are heavyweight. Create one once and keep it around. From a Connection
37 | // you get a Table instance to access Tables, an Admin instance to administer the cluster,
38 | // and RegionLocator to find where regions are out on the cluster. As opposed to Connections,
39 | // Table, Admin and RegionLocator instances are lightweight; create as you need them and then
40 | // close when done.
41 | //
42 | Connection connection = ConnectionFactory.createConnection(config);
43 | try {
44 |
45 | // The below instantiates a Table object that connects you to the "test" table
46 | // (TableName.valueOf turns String into a TableName instance).
47 | // When done with it, close it (you should start a try/finally after this creation so it gets
48 | // closed for sure, or use the jdk7 idiom, try-with-resources: see
49 | // https://docs.oracle.com/javase/tutorial/essential/exceptions/tryResourceClose.html)
50 | Table table = connection.getTable(TableName.valueOf("test"));
51 | try {
52 |
53 | // To add to a row, use Put. A Put constructor takes the name of the row
54 | // you want to insert into as a byte array. In HBase, the Bytes class has
55 | // utility for converting all kinds of java types to byte arrays. In the
56 | // below, we are converting the String "myLittleRow" into a byte array to
57 | // use as a row key for our update. Once you have a Put instance, you can
58 | // adorn it by setting the names of columns you want to update on the row,
59 | // the timestamp to use in your update, etc. If no timestamp, the server
60 | // applies current time to the edits.
61 | Put p = new Put(Bytes.toBytes("myLittleRow"));
62 |
63 | // To set the value you'd like to update in the row 'myLittleRow', specify
64 | // the column family, column qualifier, and value of the table cell you'd
65 | // like to update. The column family must already exist in your table
66 | // schema. The qualifier can be anything. All must be specified as byte
67 | // arrays as hbase is all about byte arrays. Lets pretend the table
68 | // 'test' was created with a family 'cf'.
69 | p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("someQualifier"),
70 | Bytes.toBytes("Some Value"));
71 |
72 | // Once you've adorned your Put instance with all the updates you want to
73 | // make, to commit it do the following (The HTable#put method takes the
74 | // Put instance you've been building and pushes the changes you made into
75 | // hbase)
76 | table.put(p);
77 |
78 | // Now, to retrieve the data we just wrote. The values that come back are
79 | // Result instances. Generally, a Result is an object that will package up
80 | // the hbase return into the form you find most palatable.
81 | Get g = new Get(Bytes.toBytes("myLittleRow"));
82 | Result r = table.get(g);
83 | byte [] value = r.getValue(Bytes.toBytes("cf"),
84 | Bytes.toBytes("someQualifier"));
85 |
86 | // If we convert the value bytes, we should get back 'Some Value', the
87 | // value we inserted at this location.
88 | String valueStr = Bytes.toString(value);
89 | System.out.println("GET: " + valueStr);
90 |
91 | // Sometimes, you won't know the row you're looking for. In this case, you
92 | // use a Scanner. This will give you cursor-like interface to the contents
93 | // of the table. To set up a Scanner, do like you did above making a Put
94 | // and a Get, create a Scan. Adorn it with column names, etc.
95 | Scan s = new Scan();
96 | s.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("someQualifier"));
97 | ResultScanner scanner = table.getScanner(s);
98 | try {
99 | // Scanners return Result instances.
100 | // Now, for the actual iteration. One way is to use a for loop like so:
101 | for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
102 | // print out the row we found and the columns we were looking for
103 | System.out.println("Found row: " + rr);
104 | }
105 |
106 | // The other approach is to use a foreach loop. Scanners are iterable!
107 | // for (Result rr : scanner) {
108 | // System.out.println("Found row: " + rr);
109 | // }
110 | } finally {
111 | // Make sure you close your scanners when you are done!
112 | // That's why we have it inside a try/finally clause
113 | scanner.close();
114 | }
115 |
116 | // Close your table and cluster connection.
117 | } finally {
118 | if (table != null) table.close();
119 | }
120 | } finally {
121 | connection.close();
122 | }
123 | }
124 | }
125 |
--------------------------------------------------------------------------------
/src/main/java/hbase/readme.txt:
--------------------------------------------------------------------------------
1 | # The Java version of HBase CRUD can be found directly on the official site
2 | HBase operations are provided in both a Scala and a Java version, to suit different scenarios:
3 | 1. In real-time computation, updating the data stored in HBase is done with Spark Streaming
4 | 2. In the tag-extraction step, writing tags into HBase is better done through the Java API
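
A minimal sketch of scenario 1 (not part of this repo; the table name `user_tags`, family `cf` and the helper name follow the other examples here, everything else is an assumption): pushing tag updates from a Spark Streaming DStream into HBase, opening one connection per partition because connections are heavyweight.

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.spark.streaming.dstream.DStream

def writeTags(tags: DStream[(String, String)]): Unit = {
  tags.foreachRDD { rdd =>
    rdd.foreachPartition { records =>
      // connections are heavyweight: one per partition, not one per record
      val conn = ConnectionFactory.createConnection(HBaseConfiguration.create())
      val table = conn.getTable(TableName.valueOf("user_tags"))
      try {
        records.foreach { case (rowKey, value) =>
          val p = new Put(rowKey.getBytes)
          p.addColumn("cf".getBytes, "total".getBytes, value.getBytes)
          table.put(p)
        }
      } finally {
        table.close()
        conn.close()
      }
    }
  }
}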
--------------------------------------------------------------------------------
/src/main/java/kafka/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package kafka;
2 | import kafka.consumer.Consumer;
3 | import kafka.consumer.ConsumerConfig;
4 | import kafka.consumer.ConsumerIterator;
5 | import kafka.consumer.KafkaStream;
6 | import kafka.javaapi.consumer.ConsumerConnector;
7 |
8 | import java.util.HashMap;
9 | import java.util.List;
10 | import java.util.Map;
11 | import java.util.Properties;
12 |
13 |
14 | /**
15 | * Created by lzz on 6/14/16.
16 | */
17 | public class KafkaConsumer extends Thread {
18 | private String topic;
19 |
20 | public KafkaConsumer(String topic){
21 | super();
22 | this.topic = topic;
23 | }
24 |
25 |
26 | @Override
27 | public void run() {
28 | ConsumerConnector consumer = createConsumer();
29 | Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
30 | topicCountMap.put(topic, 1); // read the topic with a single stream (one message at a time)
31 | Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicCountMap);
32 | KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0); // the single stream created above
33 | ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
34 | while(iterator.hasNext()){
35 | String message = new String(iterator.next().message());
36 | System.out.println("接收到: " + message);
37 | }
38 | }
39 |
40 | private ConsumerConnector createConsumer() {
41 | Properties properties = new Properties();
42 | properties.put("zookeeper.connect", "192.168.1.223:2181,192.168.1.221:2181,192.168.1.222:2181");//声明zk
43 | properties.put("group.id", "group001");// 必须要使用别的组名称, 如果生产者和消费者都在同一组,则不能访问同一组内的topic数据
44 | return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
45 | }
46 |
47 |
48 | public static void main(String[] args) {
49 | new KafkaConsumer("my-replicated-topic").start(); // use a topic already created on the Kafka cluster
50 |
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/main/java/kafka/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package kafka;
2 | import kafka.javaapi.producer.Producer;
3 | import kafka.producer.KeyedMessage;
4 | import kafka.producer.ProducerConfig;
5 | import kafka.serializer.StringEncoder;
6 |
7 | import java.io.IOException;
8 | import java.util.Properties;
9 | import java.util.concurrent.TimeUnit;
10 |
11 | /**
12 | * Created by lzz on 6/14/16.
13 | */
14 | public class KafkaProducer extends Thread {
15 | private String topic;
16 |
17 | public KafkaProducer(String topic){
18 | super();
19 | this.topic = topic;
20 | }
21 |
22 |
23 | @Override
24 | public void run() {
25 | Producer<Integer, String> producer = createProducer();
26 | while(true){
27 | byte[] buffer=new byte[512];
28 | try {
29 | // read a line of user input from stdin
30 | System.in.read(buffer);
31 | String str = new String(buffer);
32 | String str_format = str.replaceAll("[\\t\\n\\r]", "");
33 | producer.send(new KeyedMessage<Integer, String>(topic, str_format));
34 | } catch (IOException e) {
35 | e.printStackTrace();
36 | }
37 |
38 | try {
39 | TimeUnit.SECONDS.sleep(1);
40 | } catch (InterruptedException e) {
41 | e.printStackTrace();
42 | }
43 | }
44 | }
45 |
46 | private Producer<Integer, String> createProducer() {
47 | Properties properties = new Properties();
48 | properties.put("zookeeper.connect", "192.168.1.221:2181,192.168.1.222:2181,192.168.1.223:2181");//声明zk
49 | properties.put("serializer.class", StringEncoder.class.getName());
50 | properties.put("metadata.broker.list", "192.168.1.223:9093,192.168.1.223:9094");// 声明kafka broker
51 | return new Producer(new ProducerConfig(properties));
52 | }
53 |
54 |
55 | public static void main(String[] args) {
56 | new KafkaProducer("my-replicated-topic").start(); // use a topic already created on the Kafka cluster
57 |
58 | }
59 | }
60 |
61 |
62 | // Example input line: user1|3|5 — format "<userId>|<tagWeight1>|<tagWeight2>", parsed by splitting on '|' in spark_streaming/Kafka_similar.scala
63 |
--------------------------------------------------------------------------------
/src/main/resources/hbase-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <!--
23 | -->
24 | <configuration>
25 |   <property>
26 |     <name>hbase.rootdir</name>
27 |     <value>hdfs://192.168.1.221:9000/hbase</value>
28 |   </property>
29 |
30 |   <property>
31 |     <name>hbase.cluster.distributed</name>
32 |     <value>true</value>
33 |   </property>
34 |
35 |   <property>
36 |     <name>hbase.zookeeper.quorum</name>
37 |     <value>192.168.1.221:2181,192.168.1.222:2181,192.168.1.223:2181</value>
38 |   </property>
39 | </configuration>
40 |
--------------------------------------------------------------------------------
/src/main/scala/hbase/Hbase_CRUD.scala:
--------------------------------------------------------------------------------
1 | package hbase
2 |
3 | import org.apache.hadoop.hbase.util.Bytes
4 | import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
5 | import org.apache.hadoop.hbase.client._
6 | import org.apache.spark.{SparkConf, SparkContext}
7 |
8 | import scala.collection.JavaConversions._
9 |
10 | /**
11 | * Created by lzz on 6/28/16.
12 | * Related example: https://wiki.apache.org/hadoop/Hbase/Scala
13 | */
14 | object Hbase_CRUD extends App{
15 | val sparkConf = new SparkConf().setMaster("local")
16 | .setAppName("My App")
17 | .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
18 | val sc = new SparkContext(sparkConf)
19 |
20 | var conf = HBaseConfiguration.create()
21 | conf.set("hbase.master", "hadoop006:16010" )
22 | conf.addResource( "/home/lzz/work/idea_work/spark_work/spark_recommend/src/main/resources/hbase-site.xml" )
23 | // create the Connection
24 | val conn = ConnectionFactory.createConnection(conf)
25 | // obtain an Admin from the Connection
26 | val admin = conn.getAdmin
27 |
28 | // create the `test` table
29 | val testTable = TableName.valueOf("test")
30 | val tableDescr = new HTableDescriptor(testTable)
31 | tableDescr.addFamily(new HColumnDescriptor("cf".getBytes))
32 | println("Creating table `test`. ")
33 | if (admin.tableExists(testTable)) {
34 | admin.disableTable(testTable)
35 | admin.deleteTable(testTable)
36 | }
37 | admin.createTable(tableDescr)
38 | println("Done!")
39 |
40 | try{
41 | // get the `test` table
42 | val table = conn.getTable(testTable)
43 | try{
44 | // prepare to insert a row with key rk001
45 | val p = new Put("rk001".getBytes)
46 | // specify the columns and values for the put
47 | p.addColumn("cf".getBytes,"name".getBytes, "linzhouzhi".getBytes)
48 | p.addColumn("cf".getBytes,"password".getBytes, "22222".getBytes)
49 | // commit the put
50 | table.put(p)
51 |
52 | // fetch a single row
53 | val g = new Get("rk001".getBytes)
54 | val result = table.get(g)
55 | val value = Bytes.toString(result.getValue("cf".getBytes,"name".getBytes))
56 | println("get rk001: "+value)
57 |
58 | // scan the data
59 | val s = new Scan()
60 | s.addColumn("cf".getBytes,"name".getBytes)
61 | val scanner = table.getScanner(s)
62 |
63 | try{
64 | for(r <- scanner){
65 | println("found row: "+r)
66 | println("found value: "+Bytes.toString(r.getValue("cf".getBytes,"name".getBytes)))
67 | }
68 | }finally {
69 | // close the scanner
70 | scanner.close()
71 | }
72 |
73 | // delete a column
74 | val d = new Delete("rk001".getBytes)
75 | d.addColumn("cf".getBytes,"name".getBytes)
76 | table.delete(d)
77 |
78 | }finally {
79 | if(table != null) table.close()
80 | }
81 |
82 | }finally {
83 | // close the connection
84 | conn.close()
85 | }
86 |
87 | }
88 |
--------------------------------------------------------------------------------
/src/main/scala/hbase/Spark_RDD.scala:
--------------------------------------------------------------------------------
1 | package hbase
2 |
3 | import org.apache.hadoop.hbase.HBaseConfiguration
4 | import org.apache.hadoop.hbase.client.Scan
5 | import org.apache.hadoop.hbase.mapreduce.TableInputFormat
6 | import org.apache.hadoop.hbase.protobuf.ProtobufUtil
7 | import org.apache.hadoop.hbase.util.{Base64, Bytes}
8 | import org.apache.spark.{SparkConf, SparkContext}
9 |
10 | /**
11 | * Created by lzz on 6/28/16.
12 | */
13 | object Spark_RDD extends App{
14 | val sparkConf = new SparkConf().setMaster("local")
15 | .setAppName("My App")
16 | .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
17 | val sc = new SparkContext(sparkConf)
18 |
19 | var hConf = HBaseConfiguration.create()
20 | hConf.set("hbase.master", "hadoop006:16010" )
21 | hConf.addResource( "main/resources/hbase-site.xml" )
22 | hConf.set(TableInputFormat.INPUT_TABLE, "user_tags")
23 |
24 |
25 | var scan = new Scan();
26 | scan.addFamily(Bytes.toBytes("cf"))
27 | var proto = ProtobufUtil.toScan(scan);
28 | var ScanToString = Base64.encodeBytes(proto.toByteArray());
29 | hConf.set(TableInputFormat.SCAN, ScanToString);
30 |
31 | val usersRDD = sc.newAPIHadoopRDD( hConf, classOf[TableInputFormat],
32 | classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
33 | classOf[org.apache.hadoop.hbase.client.Result])
34 |
35 | usersRDD.map( x => x._2 )
36 | .map( result => ( result.getRow, List( result.getValue( Bytes.toBytes("cf"),Bytes.toBytes("h1")),result.getValue( Bytes.toBytes("cf"),Bytes.toBytes("name") ) ) ))
37 | .map( row => ( new String(row._1), row._2 ) )
38 | .foreach(
39 | r => ( println( r._1 + "----" + r._2 ) )
40 | )
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/scala/matrix/TRowMatrix.scala:
--------------------------------------------------------------------------------
1 | package matrix
2 |
3 | import org.apache.spark.mllib.linalg.{Vector, Vectors}
4 | import org.apache.spark.mllib.linalg.distributed.RowMatrix
5 |
6 | /**
7 | * Created by lzz on 6/28/16.
8 | */
9 | object TRowMatrix {
10 | def transposeRowMatrix(m: RowMatrix): RowMatrix = {
11 | val transposedRowsRDD = m.rows.zipWithIndex.map{case (row, rowIndex) => rowToTransposedTriplet(row, rowIndex)}
12 | .flatMap(x => x)
13 | .groupByKey
14 | .sortByKey().map(_._2)
15 | .map(buildRow)
16 | new RowMatrix(transposedRowsRDD)
17 | }
18 |
19 | private def rowToTransposedTriplet(row: Vector, rowIndex: Long): Array[(Long, (Long, Double))] = {
20 | val indexedRow = row.toArray.zipWithIndex
21 | indexedRow.map{case (value, colIndex) => (colIndex.toLong, (rowIndex, value))}
22 | }
23 |
24 | private def buildRow(rowWithIndexes: Iterable[(Long, Double)]): Vector = {
25 | val resArr = new Array[Double](rowWithIndexes.size)
26 | rowWithIndexes.foreach{case (index, value) =>
27 | resArr(index.toInt) = value
28 | }
29 | Vectors.dense(resArr)
30 | }
31 | }
32 |
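
transposeRowMatrix works by flat-mapping every row into (columnIndex, (rowIndex, value)) triplets, grouping the triplets by column index, sorting by that key, and rebuilding each former column as a new row. A minimal local usage sketch (not in this repo; the demo object and the SparkContext setup are illustrative):

import matrix.TRowMatrix
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.{SparkConf, SparkContext}

object TRowMatrixDemo extends App {
  val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("transpose demo"))
  // a 2 x 3 matrix: [[1,2,3], [4,5,6]]
  val m = new RowMatrix(sc.parallelize(Seq(
    Vectors.dense(1.0, 2.0, 3.0),
    Vectors.dense(4.0, 5.0, 6.0))))
  // the transposed rows should be [1,4], [2,5], [3,6]
  TRowMatrix.transposeRowMatrix(m).rows.foreach(println)
}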
--------------------------------------------------------------------------------
/src/main/scala/spark_streaming/Demo.scala:
--------------------------------------------------------------------------------
1 | package spark_streaming
2 |
3 | import org.apache.spark.streaming.{Seconds, StreamingContext}
4 | import org.apache.spark.SparkConf
5 |
6 | /**
7 | * Created by lzz on 6/28/16.
8 | */
9 | object Demo extends App{
10 | val sparkConf = new SparkConf().setMaster("local")
11 | .setAppName("My App")
12 | val ssc = new StreamingContext(sparkConf, Seconds(1))
13 | val lines = ssc.socketTextStream("localhost", 9999)
14 | val words = lines.flatMap(_.split(" "))
15 | val pairs = words.map(word => (word, 1))
16 | val wordCounts = pairs.reduceByKey(_ + _)
17 | wordCounts.print()
18 | ssc.start()
19 | ssc.awaitTermination()
20 | }
21 |
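// Usage note (assumption, standard for socketTextStream demos): feed this with a local
// netcat listener, e.g. `nc -lk 9999`, then type space-separated words; the per-batch
// (1 second) word counts are printed to stdout.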
--------------------------------------------------------------------------------
/src/main/scala/spark_streaming/Kafka_similar.scala:
--------------------------------------------------------------------------------
1 | package spark_streaming
2 |
3 | import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
4 | import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
5 | import org.apache.spark.SparkConf
6 | import org.apache.spark.mllib.linalg._
7 | import org.apache.spark.streaming.kafka.KafkaUtils
8 | import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
9 | import redis.clients.jedis.Jedis
10 |
11 | import scala.collection.immutable.IndexedSeq
12 |
13 | /**
14 | * Created by lzz on 6/28/16.
15 | */
16 | object Kafka_similar extends App{
17 |
18 | var conf = HBaseConfiguration.create()
19 | conf.set("hbase.master", "hadoop006:16010" )
20 | conf.addResource( "/home/lzz/work/idea_work/spark_work/spark_recommend/src/main/resources/hbase-site.xml" )
21 | // create the Connection
22 | val conn = ConnectionFactory.createConnection(conf)
23 | val testTable = TableName.valueOf("test")
24 | val table = conn.getTable(testTable)
25 |
26 | val jedis = new Jedis("192.168.1.220",6379)
27 | jedis.incr( "web_80_07_pv" )
28 | jedis.close()
29 |
30 | val ad_tags: Matrix = Matrices.dense(3, 2, Array(1.0, 3.0, 5.0, 2.0, 4.0, 6.0))
31 | //val dm_sparse: Matrix = Matrices.sparse( 3,2, Array(0, 1,6), Array(0, 0, 1,1,2,2), Array(3, 4, 1,2,4,5))
32 | val user_arr = Array( 1.0, 3.0 )
33 | val userTags: Vector = Vectors.dense( user_arr )
34 | ad_tags.multiply( userTags ).toArray.foreach( println )
35 |
36 | val sparkConf = new SparkConf().setMaster("local")
37 | .setAppName("My App")
38 | val ssc = new StreamingContext(sparkConf, Seconds(5))
39 |
40 | //val Array(zkQuorum, group, topics, numThreads) = args
41 | val zkQuorum = "192.168.1.221:2181,192.168.1.222:2181,192.168.1.223:2181"
42 | val group = "group001"
43 | val topics = "my-replicated-topic"
44 | val numThreads = 2
45 |
46 | ssc.checkpoint("checkpoint")
47 | val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap
48 | val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap).map(_._2)
49 | val user_tags = lines.map(_.split('|'))
50 | val ad_user = user_tags.map(x => (x(0), format_tag( x )))
51 | .map( x => (sim_write( x._1, x._2) ) )
52 | .print()
53 |
54 | // .reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)
55 | // wordCounts.print()
56 |
57 | ssc.start()
58 | ssc.awaitTermination()
59 |
60 | def format_tag( x: Array[String] ): DenseVector ={
61 | val tag_arr = Array( x(1).toDouble, x(2).toDouble )
62 | val tag_vector = Vectors.dense( tag_arr )
63 |
64 | //ad_tags.multiply( tag_vector ).toJson.zip( Array("ad1","ad2","ad3") )
65 | ad_tags.multiply( tag_vector )
66 | }
67 |
68 | def sim_write( key: String, value: Vector ): Unit ={
69 | val ads = Array( "ad1","ad2","ad3" )
70 | val ads_size = ads.size
71 |
72 | for( i <- 0 until ads_size ){ // `until`, not `to`: `to` would run past the end of the ads array
73 | // prepare to insert a row keyed by userId + adId
74 | val rowkey = key + ads(i)
75 | val p = new Put( rowkey.getBytes )
76 | // specify the column and value for the put
77 | p.addColumn("cf".getBytes, "value".getBytes(), value(i).toString.getBytes )
78 | table.put(p)
79 | val tmp = key + "-" + ads(i) + ":" + value(i)
80 | println( tmp )
81 | }
82 | }
83 | }
84 |
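
A worked instance of the multiply in format_tag (illustrative numbers only): Matrices.dense(3, 2, Array(1.0, 3.0, 5.0, 2.0, 4.0, 6.0)) is column-major, i.e. the matrix [[1,2],[3,4],[5,6]]. For an incoming message user1|3|5 the user vector is [3,5], so the three per-ad scores are [1*3+2*5, 3*3+4*5, 5*3+6*5] = [13, 29, 45], which sim_write then stores in HBase under the row keys user1ad1, user1ad2 and user1ad3.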
--------------------------------------------------------------------------------
/src/main/scala/spark_streaming/Kafka_wc.scala:
--------------------------------------------------------------------------------
1 | package spark_streaming
2 |
3 | import org.apache.spark.SparkConf
4 | import org.apache.spark.streaming.kafka.KafkaUtils
5 | import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
6 |
7 | /**
8 | * Created by lzz on 6/28/16.
9 | */
10 | object Kafka_wc extends App{
11 | val sparkConf = new SparkConf().setMaster("local")
12 | .setAppName("My App")
13 | val ssc = new StreamingContext(sparkConf, Seconds(1))
14 |
15 | //val Array(zkQuorum, group, topics, numThreads) = args
16 | val zkQuorum = "192.168.1.221:2181,192.168.1.222:2181,192.168.1.223:2181"
17 | val group = "group001"
18 | val topics = "my-replicated-topic"
19 | val numThreads = 2
20 |
21 | ssc.checkpoint("checkpoint")
22 | val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap
23 | val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap).map(_._2)
24 | val words = lines.flatMap(_.split(" "))
25 | val wordCounts = words.map(x => (x, 1L))
26 | .reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)
27 | wordCounts.print()
28 |
29 | ssc.start()
30 | ssc.awaitTermination()
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/src/main/scala/tags/HbaseMatrix.scala:
--------------------------------------------------------------------------------
1 | package tags
2 |
3 | import org.apache.hadoop.hbase.HBaseConfiguration
4 | import org.apache.hadoop.hbase.client.{Result, Scan}
5 | import org.apache.hadoop.hbase.mapreduce.TableInputFormat
6 | import org.apache.hadoop.hbase.protobuf.ProtobufUtil
7 | import org.apache.hadoop.hbase.util.{Base64, Bytes}
8 | import org.apache.spark.mllib.linalg.distributed.RowMatrix
9 | import org.apache.spark.mllib.linalg.{Matrices, Matrix, Vector, Vectors}
10 | import org.apache.spark.rdd.RDD
11 | import org.apache.spark.{SparkConf, SparkContext}
12 |
13 | /**
14 | * Created by lzz on 6/29/16.
15 | */
16 | object HbaseMatrix extends App{
17 | val sparkConf = new SparkConf().setMaster("local")
18 | .setAppName("My App")
19 | .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
20 | val sc = new SparkContext(sparkConf)
21 |
22 | var conf = HBaseConfiguration.create()
23 | conf.set("hbase.master", "hadoop006:16010" )
24 | conf.addResource( "main/resources/hbase-site.xml" )
25 | conf.set(TableInputFormat.INPUT_TABLE, "user_tags")
26 |
27 |
28 | // the tag vocabulary; this could instead be read straight from a database
29 | val tags = Array( "tag16", "tag18", "tag9", "tag15", "tag19", "tag6", "tag13", "tag7", "tag23", "tag11", "tag24", "tag27", "tag4", "tag8", "tag12", "tag3" )
30 | var scan = new Scan();
31 | scan.addFamily(Bytes.toBytes("cf"))
32 | var proto = ProtobufUtil.toScan(scan);
33 | var ScanToString = Base64.encodeBytes(proto.toByteArray());
34 | conf.set(TableInputFormat.SCAN, ScanToString);
35 | // read the (user, tag) table data from HBase
36 | val usersRDD = sc.newAPIHadoopRDD( conf, classOf[TableInputFormat],
37 | classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
38 | classOf[org.apache.hadoop.hbase.client.Result])
39 |
40 | // user - tags
41 | val user_tags = usersRDD.map( x => x._2 )
42 | .map( result => ( result.getRow, result.getValue( Bytes.toBytes("cf"),Bytes.toBytes("total")) ))
43 | .map( row => ( new String(row._1), new String ( row._2 ) ) )
44 | .map( row =>( row._1.toString.split( '+' )(0),row._1.toString.split( '+' )(1) + ":" + row._2 ) )
45 | .groupByKey
46 | .map( row => ( row._1, row._2.toList ))
47 | .sortBy( row => row._1)
48 |
49 |
50 | // convert each row into a local dense vector, used to build the matrix
51 | val rows: RDD[Vector] = user_tags
52 | .map( row => tag_vector( row._2 ) )
53 | // build the distributed RowMatrix
54 | val mat: RowMatrix = new RowMatrix( rows )
55 | // the tag weights of one ad, in the same order as `tags`
56 | val ad_tags = Array(0.0,0.0,0.0,0.0,0.0,0.0,8.0,9.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0)
57 | // turn it into a one-column matrix so it can be multiplied with the user-tag matrix
58 | val dm: Matrix = Matrices.dense( tags.size, 1, ad_tags )
59 | // multiply to get the ad-user similarity matrix (multiplying directly like this should really be optimized)
60 | val ad_recommoned = mat.multiply( dm ).rows.map( x => ( x(0) ))
61 | // the RDD of user ids
62 | val user = user_tags.map( x => x._1 )
63 | // take the top-N users; these are the users the ad should be delivered to
64 | ad_recommoned.zip( user ).sortBy( x => x._1 ).top( 3 ).foreach( println )
65 |
66 | def tag_vector( list: List[String] ): Vector ={
67 | var row_tag = new Array[ Double ]( tags.size )
68 | for( l <- list ){
69 | val tag_map = l.split( ":" )
70 | val tag_i = tags.indexOf( tag_map( 0 ) )
71 | row_tag(tag_i) = tag_map( 1 ).toDouble
72 | }
73 | Vectors.dense( row_tag )
74 | }
75 |
76 | }
77 |
78 |
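
The scoring step is just a (users x tags) by (tags x 1) product: each user's score is the dot product of that user's tag weights with the ad's tag weights. A tiny local sketch of the same multiply, with made-up data (not from HBase; the object name and values are illustrative):

import org.apache.spark.mllib.linalg.{Matrices, Vectors}
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.{SparkConf, SparkContext}

object AdScoreDemo extends App {
  val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("ad score demo"))
  // two users over three tags
  val users = new RowMatrix(sc.parallelize(Seq(
    Vectors.dense(1.0, 0.0, 2.0),   // user A
    Vectors.dense(0.0, 3.0, 0.0)))) // user B
  // one ad's weights on the same three tags, as a 3 x 1 matrix
  val ad = Matrices.dense(3, 1, Array(5.0, 1.0, 1.0))
  // scores: user A = 1*5 + 0*1 + 2*1 = 7, user B = 0*5 + 3*1 + 0*1 = 3
  users.multiply(ad).rows.map(_(0)).foreach(println)
}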
--------------------------------------------------------------------------------
/src/main/scala/tags/localMatrix.scala:
--------------------------------------------------------------------------------
1 | package tags
2 | import matrix.TRowMatrix
3 | import org.apache.spark.mllib.linalg.{Vectors, _}
4 | import org.apache.spark.mllib.linalg.distributed.RowMatrix
5 | import org.apache.spark.{SparkConf, SparkContext}
6 |
7 | /**
8 | * Created by lzz on 6/28/16.
9 | */
10 | object localMatrix extends App{
11 | val sparkConf = new SparkConf().setMaster("local")
12 | .setAppName("My App")
13 | .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
14 | val sc = new SparkContext(sparkConf)
15 |
16 | // tag data: each element is a tag id
17 | val tags = Array(1, 2, 3, 5, 6, 8, 9, 11, 12, 13, 14, 17)
18 | // user ids
19 | val users = Array(2, 3, 4, 5, 6, 8, 9, 12, 13, 14, 15, 16, 18, 19, 22, 24, 25, 26, 29)
20 | // ad ids
21 | val ads = Array( 3, 5, 6, 7, 8 )
22 | // (ad id, tag id, weight)
23 | val ad_tags = Seq(
24 | Vector( 3, 1,1),
25 | Vector( 5, 2,1),
26 | Vector( 6, 3,1),
27 | Vector( 7, 5,1),
28 | Vector( 8, 6,1),
29 | Vector( 6, 8,1),
30 | Vector( 8, 9,1),
31 | Vector( 3, 5,1),
32 | Vector( 3, 5, 1),
33 | Vector( 5, 6, 1),
34 | Vector( 6, 8, 1),
35 | Vector( 7, 9, 1),
36 | Vector( 8, 11,1),
37 | Vector( 6, 12,1),
38 | Vector( 8, 13,1)
39 | )
40 | // (user id, tag id, impression count)
41 | val user_tags = Seq(
42 | Vector( 2,1,1),
43 | Vector( 2,2,1),
44 | Vector( 3,3,1),
45 | Vector( 4,5,1),
46 | Vector( 5,6,1),
47 | Vector( 6,8,1),
48 | Vector( 8,9,1),
49 | Vector( 9,5,1),
50 | Vector( 12,5, 1),
51 | Vector( 12,6, 1),
52 | Vector( 13,8, 1),
53 | Vector( 14,9, 1),
54 | Vector( 15,11,1),
55 | Vector( 16,12,1),
56 | Vector( 18,13,1),
57 | Vector( 19,14,1),
58 | Vector( 22,17,1),
59 | Vector( 24,1, 1),
60 | Vector( 25,2, 1),
61 | Vector( 26,3, 1),
62 | Vector( 22,5, 1),
63 | Vector( 24,6, 1),
64 | Vector( 6, 8, 1),
65 | Vector( 8, 9, 1),
66 | Vector( 9, 11,1),
67 | Vector( 12,12,1),
68 | Vector( 13,13,1),
69 | Vector( 14,14,1),
70 | Vector( 15,17,1),
71 | Vector( 16,1,1),
72 | Vector( 18,1,1),
73 | Vector( 19,1,1),
74 | Vector( 3,1,1),
75 | Vector( 4,3,1),
76 | Vector( 5,6,1),
77 | Vector( 6,5,1),
78 | Vector( 12,1, 1),
79 | Vector( 13,2, 1),
80 | Vector( 14,3, 1),
81 | Vector( 15,5, 1),
82 | Vector( 16,6, 1),
83 | Vector( 18,8, 1),
84 | Vector( 19,9, 1),
85 | Vector( 22,11,1),
86 | Vector( 24,12,1),
87 | Vector( 25,13,3)
88 | )
89 |
90 | // create an empty users x tags matrix
91 | var ut_matrix = Array.ofDim[Double](users.size, tags.size)
92 | // fill the matrix with the data from user_tags
93 | for( user_tag <- user_tags ){
94 | val user_i = users.indexOf( user_tag(0) )
95 | val tag_i = tags.indexOf( user_tag(1) )
96 | ut_matrix(user_i)(tag_i) = user_tag(2)
97 | }
98 |
99 | // turn the 2-D array into a 1-D array of Vectors
100 | var userSeq = new Array[ Vector ]( users.size )
101 | for( i <- 0 to (users.size - 1) ){
102 | val ut_vector: Vector = Vectors.dense( ut_matrix( i ) )
103 | userSeq( i ) = ut_vector
104 | }
105 | // convert to a RowMatrix
106 | val userM = new RowMatrix( sc.parallelize( userSeq.toSeq ) )
107 | println( "user-tag matrix ××××××××××××" )
108 | userM.rows.foreach( println )
109 |
110 | // create the tags x ads matrix
111 | var at_matrix = Array.ofDim[Double]( tags.size, ads.size )
112 | for( ad_tag <- ad_tags ){
113 | val ad_i = ads.indexOf( ad_tag(0) )
114 | val tag_i = tags.indexOf( ad_tag(1) )
115 | at_matrix(tag_i)(ad_i) = ad_tag(2)
116 | }
117 |
118 | // copy the tag-ad matrix into adSeq in column-major order, the layout DenseMatrix expects
119 | var adSeq = new Array[ Double ]( tags.size * ads.size )
120 | for( i <- 0 to (tags.size - 1) ){
121 | for( item <- 0 to ( ads.size - 1) ){
122 | adSeq( item*tags.size + i ) = at_matrix(i)(item) // column-major index: column `item`, row `i`
123 | }
124 | }
125 | // build the dense matrix (tags.size rows x ads.size columns)
126 | val adM: DenseMatrix = new DenseMatrix(tags.size, ads.size, adSeq )
127 | println( "tag-ad matrix ××××××××××××××××××" )
128 | adM.colIter.foreach( println )
129 |
130 | // multiply the matrices to get the similarity matrix
131 | val rM = TRowMatrix.transposeRowMatrix( userM multiply( adM ) )
132 | println( "similarity matrix ×××××××××××××××" )
133 | rM.rows.foreach( println )
134 |
135 | }
136 |
--------------------------------------------------------------------------------
/target/.history:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/.history
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend$sbt_2.11/1.0/resolved.xml.properties:
--------------------------------------------------------------------------------
1 | #default#spark_recommend$sbt_2.11;1.0 resolved revisions
2 | #Tue Jun 28 01:36:47 PDT 2016
3 | +revision\:\#@\#\:+0.13.8\:\#@\#\:+module\:\#@\#\:+sbt\:\#@\#\:+organisation\:\#@\#\:+org.scala-sbt\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=0.13.8 release 0.13.8 null
4 |
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend$sbt_2.11/1.0/resolved.xml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend$sources_2.11/1.0/resolved.xml.properties:
--------------------------------------------------------------------------------
1 | #default#spark_recommend$sources_2.11;1.0 resolved revisions
2 | #Tue Jun 28 01:36:47 PDT 2016
3 | +revision\:\#@\#\:+1.0.4\:\#@\#\:+module\:\#@\#\:+scala-xml_2.11\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang.modules\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=1.0.4 release 1.0.4 null
4 | +revision\:\#@\#\:+1.0.4\:\#@\#\:+module\:\#@\#\:+scala-parser-combinators_2.11\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang.modules\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=1.0.4 release 1.0.4 null
5 | +revision\:\#@\#\:+2.11.8\:\#@\#\:+module\:\#@\#\:+scala-reflect\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:+info.apiURL\:\#@\#\:+http\://www.scala-lang.org/api/2.11.8/\:\#@\#\:=2.11.8 ? 2.11.8 null
6 | +revision\:\#@\#\:+2.11.8\:\#@\#\:+module\:\#@\#\:+scala-compiler\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.11.8 release 2.11.8 null
7 | +revision\:\#@\#\:+2.12.1\:\#@\#\:+module\:\#@\#\:+jline\:\#@\#\:+organisation\:\#@\#\:+jline\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.12.1 release 2.12.1 null
8 | +revision\:\#@\#\:+2.11.8\:\#@\#\:+module\:\#@\#\:+scala-library\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:+info.apiURL\:\#@\#\:+http\://www.scala-lang.org/api/2.11.8/\:\#@\#\:=2.11.8 ? 2.11.8 null
9 |
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend$sources_2.11/1.0/resolved.xml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend_2.11/1.0/resolved.xml.properties:
--------------------------------------------------------------------------------
1 | #default#spark_recommend_2.11;1.0 resolved revisions
2 | #Tue Jun 28 01:36:46 PDT 2016
3 | +revision\:\#@\#\:+2.11.8\:\#@\#\:+module\:\#@\#\:+scala-compiler\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.11.8 release 2.11.8 null
4 | +revision\:\#@\#\:+2.11.8\:\#@\#\:+module\:\#@\#\:+scala-library\:\#@\#\:+organisation\:\#@\#\:+org.scala-lang\:\#@\#\:+branch\:\#@\#\:+@\#\:NULL\:\#@\:\#@\#\:=2.11.8 ? 2.11.8 null
5 |
--------------------------------------------------------------------------------
/target/resolution-cache/default/spark_recommend_2.11/1.0/resolved.xml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 | spark_recommend
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-compile-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-compile.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-docs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-optional.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-plugin.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-provided.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-runtime-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-runtime.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-scala-tool.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-sources.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-test-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend$sources_2.11-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-compile-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-compile.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-docs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-optional.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-plugin.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-provided.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-runtime-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-runtime.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-scala-tool.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-sources.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-test-internal.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/default-spark_recommend_2.11-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/target/resolution-cache/reports/ivy-report.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | *
17 | */
18 |
19 | body {
20 | font-family:"Trebuchet MS",Verdana,Geneva,Arial,Helvetica,sans-serif;
21 | font-size:small;
22 | }
23 |
24 | div#logo {
25 | float: right;
26 | padding-left: 10px;
27 | padding-bottom: 10px;
28 | background: white;
29 | text-align: center;
30 | }
31 |
32 | #logo img {
33 | border: 0;
34 | }
35 |
36 | div#date {
37 | font-style: italic;
38 | padding-left: 60px;
39 | padding-bottom: 40px;
40 | }
41 |
42 |
43 | h1 {
44 | margin-bottom:2px;
45 |
46 | border-color:#7A9437;
47 | border-style:solid;
48 | border-width:0 0 3px 0;
49 | }
50 |
51 | span#module {
52 | color:#7A9437;
53 | text-decoration:none;
54 | }
55 |
56 | span#organisation {
57 | color:black;
58 | text-decoration:none;
59 | }
60 |
61 | #confmenu {
62 | color: #000;
63 | border-bottom: 2px solid black;
64 | margin: 12px 0px 0px 0px;
65 | padding: 0px;
66 | z-index: 1;
67 | padding-left: 10px
68 | }
69 |
70 | #confmenu li {
71 | display: inline;
72 | overflow: hidden;
73 | list-style-type: none;
74 | }
75 |
76 | #confmenu a, a.active {
77 | color: #DEDECF;
78 | background: #898B5E;
79 | font: bold 1em "Trebuchet MS", Arial, sans-serif;
80 | border: 2px solid black;
81 | padding: 2px 5px 0px 5px;
82 | text-decoration: none;
83 | }
84 |
85 | /*
86 | background: #ABAD85 #CED4BD
87 | background: #DEE4CD
88 | */
89 |
90 | #confmenu a.active {
91 | color: #7A9437;
92 | background: #DEE4CD;
93 | border-bottom: 3px solid #DEE4CD;
94 | }
95 |
96 | #confmenu a:hover {
97 | color: #fff;
98 | background: #ADC09F;
99 | }
100 |
101 | #confmenu a:visited {
102 | color: #DEDECF;
103 | }
104 |
105 | #confmenu a.active:visited {
106 | color: #7A9437;
107 | }
108 |
109 | #confmenu a.active:hover {
110 | background: #DEE4CD;
111 | color: #DEDECF;
112 | }
113 |
114 | #content {
115 | background: #DEE4CD;
116 | padding: 20px;
117 | border: 2px solid black;
118 | border-top: none;
119 | z-index: 2;
120 | }
121 |
122 | #content a {
123 | text-decoration: none;
124 | color: #E8E9BE;
125 | }
126 |
127 | #content a:hover {
128 | background: #898B5E;
129 | }
130 |
131 |
132 | h2 {
133 | margin-bottom:2px;
134 | font-size:medium;
135 |
136 | border-color:#7A9437;
137 | border-style:solid;
138 | border-width:0 0 2px 0;
139 | }
140 |
141 | h3 {
142 | margin-top:30px;
143 | margin-bottom:2px;
144 | padding: 5 5 5 0;
145 | font-size: 24px;
146 | border-style:solid;
147 | border-width:0 0 2px 0;
148 | }
149 |
150 | h4 {
151 | margin-bottom:2px;
152 | margin-top:2px;
153 | font-size:medium;
154 |
155 | border-color:#7A9437;
156 | border-style:dashed;
157 | border-width:0 0 1px 0;
158 | }
159 |
160 | h5 {
161 | margin-bottom:2px;
162 | margin-top:2px;
163 | margin-left:20px;
164 | font-size:medium;
165 | }
166 |
167 | span.resolved {
168 | padding-left: 15px;
169 | font-weight: 500;
170 | font-size: small;
171 | }
172 |
173 |
174 | #content table {
175 | border-collapse:collapse;
176 | width:90%;
177 | margin:auto;
178 | margin-top: 5px;
179 | }
180 | #content thead {
181 | background-color:#CED4BD;
182 | border:1px solid #7A9437;
183 | }
184 | #content tbody {
185 | border-collapse:collapse;
186 | background-color:#FFFFFF;
187 | border:1px solid #7A9437;
188 | }
189 |
190 | #content th {
191 | font-family:monospace;
192 | border:1px solid #7A9437;
193 | padding:5px;
194 | }
195 |
196 | #content td {
197 | border:1px dotted #7A9437;
198 | padding:0 3 0 3;
199 | }
200 |
201 | #content table a {
202 | color:#7A9437;
203 | text-decoration:none;
204 | }
205 |
206 | #content table a:hover {
207 | background-color:#CED4BD;
208 | color:#7A9437;
209 | }
210 |
211 |
212 |
213 | table.deps {
214 | border-collapse:collapse;
215 | width:90%;
216 | margin:auto;
217 | margin-top: 5px;
218 | }
219 |
220 | table.deps thead {
221 | background-color:#CED4BD;
222 | border:1px solid #7A9437;
223 | }
224 | table.deps tbody {
225 | border-collapse:collapse;
226 | background-color:#FFFFFF;
227 | border:1px solid #7A9437;
228 | }
229 |
230 | table.deps th {
231 | font-family:monospace;
232 | border:1px solid #7A9437;
233 | padding:2;
234 | }
235 |
236 | table.deps td {
237 | border:1px dotted #7A9437;
238 | padding:0 3 0 3;
239 | }
240 |
241 |
242 |
243 |
244 |
245 | table.header {
246 | border:0;
247 | width:90%;
248 | margin:auto;
249 | margin-top: 5px;
250 | }
251 |
252 | table.header thead {
253 | border:0;
254 | }
255 | table.header tbody {
256 | border:0;
257 | }
258 | table.header tr {
259 | padding:0px;
260 | border:0;
261 | }
262 | table.header td {
263 | padding:0 3 0 3;
264 | border:0;
265 | }
266 |
267 | td.title {
268 | width:150px;
269 | margin-right:15px;
270 |
271 | font-size:small;
272 | font-weight:700;
273 | }
274 |
275 | td.title:first-letter {
276 | color:#7A9437;
277 | background-color:transparent;
278 | }
279 |
280 |
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <!--
23 | -->
24 | <configuration>
25 |   <property>
26 |     <name>hbase.rootdir</name>
27 |     <value>hdfs://192.168.1.221:9000/hbase</value>
28 |   </property>
29 |
30 |   <property>
31 |     <name>hbase.cluster.distributed</name>
32 |     <value>true</value>
33 |   </property>
34 |
35 |   <property>
36 |     <name>hbase.zookeeper.quorum</name>
37 |     <value>192.168.1.221:2181,192.168.1.222:2181,192.168.1.223:2181</value>
38 |   </property>
39 | </configuration>
40 |
--------------------------------------------------------------------------------
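The hbase-site.xml above is the client-side HBase configuration bundled onto the classpath: it points the client at the HDFS root directory and the three-node ZooKeeper quorum. A minimal Scala sketch of how such a classpath configuration gets picked up is shown below; it assumes the HBase 1.x client API, and the connection call only succeeds if the 192.168.1.221-223 cluster is actually reachable.

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.ConnectionFactory

object HbaseConfigSketch {
  def main(args: Array[String]): Unit = {
    // HBaseConfiguration.create() merges hbase-default.xml with any
    // hbase-site.xml found on the classpath, so the rootdir, distributed
    // flag and zookeeper quorum defined above are applied automatically.
    val conf = HBaseConfiguration.create()
    println(conf.get("hbase.rootdir"))
    println(conf.get("hbase.cluster.distributed"))
    println(conf.get("hbase.zookeeper.quorum"))

    // Building a connection from that configuration; this requires the
    // cluster listed in the quorum to be reachable from this machine.
    val connection = ConnectionFactory.createConnection(conf)
    connection.close()
  }
}
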
/target/scala-2.11/classes/hbase/Hbase_CRUD$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Hbase_CRUD$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Hbase_CRUD$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Hbase_CRUD$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Hbase_CRUD$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Hbase_CRUD$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Hbase_CRUD.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Hbase_CRUD.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Hbase_CURD.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Hbase_CURD.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$$anonfun$4.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/Spark_RDD.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/hbase/Spark_RDD.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/hbase/readme.txt:
--------------------------------------------------------------------------------
1 | # The Java version of HBase CRUD can be found directly in the official documentation.
2 | HBase operations are provided in both a Scala version and a Java version so that they fit different scenarios:
3 | 1. In real-time computation, updating the data saved in HBase is done through Spark Streaming operations.
4 | 2. During tag extraction, writing the tags into HBase is better done through the Java API.
--------------------------------------------------------------------------------
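Scenario 1 from the readme above (keeping HBase up to date from a real-time computation) is the Spark Streaming path. A rough sketch of that pattern follows, assuming the HBase 1.x client API; it is illustrative only, not the project's actual job: a socket source stands in for the Kafka stream, and the table name user_tags and column family cf are hypothetical placeholders.

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object StreamingToHbaseSketch {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("streaming-to-hbase-sketch").setMaster("local[2]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // Socket source used as a stand-in for the project's Kafka stream.
    val lines = ssc.socketTextStream("localhost", 9999)

    lines.foreachRDD { rdd =>
      rdd.foreachPartition { records =>
        // One HBase connection per partition; hbase-site.xml on the
        // classpath supplies the zookeeper quorum and root directory.
        val hbaseConf = HBaseConfiguration.create()
        val connection = ConnectionFactory.createConnection(hbaseConf)
        val table = connection.getTable(TableName.valueOf("user_tags"))
        records.filter(_.nonEmpty).foreach { line =>
          val put = new Put(Bytes.toBytes(line))
          put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("raw"), Bytes.toBytes(line))
          table.put(put)
        }
        table.close()
        connection.close()
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}

Writing tags from the batch tag-extraction job (scenario 2) would use the same client calls, just invoked from plain Java/Scala code rather than a streaming context.
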
/target/scala-2.11/classes/kafka/KafkaConsumer.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/kafka/KafkaConsumer.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/kafka/KafkaProducer.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/kafka/KafkaProducer.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$4.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$matrix$TRowMatrix$$buildRow$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$matrix$TRowMatrix$$buildRow$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$matrix$TRowMatrix$$rowToTransposedTriplet$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$$anonfun$matrix$TRowMatrix$$rowToTransposedTriplet$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/matrix/TRowMatrix.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/matrix/TRowMatrix.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Demo.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Demo.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_similar$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_similar$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_similar$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_similar$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_similar.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_similar.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$4.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$5.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$5.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$6.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$$anonfun$6.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/spark_streaming/Kafka_wc.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/spark_streaming/Kafka_wc.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$10.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$10.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$11.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$11.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$4.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$5.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$5.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$6.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$6.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$7.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$7.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$8.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$8.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$9.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$9.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$tag_vector$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$$anonfun$tag_vector$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/HbaseMatrix.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/HbaseMatrix.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$2$$anonfun$apply$mcVI$sp$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$2$$anonfun$apply$mcVI$sp$1.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$2.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$3.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$4.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$5.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$5.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$6.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$6.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$$anonfun$7.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$$anonfun$7.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix$delayedInit$body.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix$delayedInit$body.class
--------------------------------------------------------------------------------
/target/scala-2.11/classes/tags/localMatrix.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/scala-2.11/classes/tags/localMatrix.class
--------------------------------------------------------------------------------
/target/streams/$global/$global/dumpStructure/$global/streams/out:
--------------------------------------------------------------------------------
1 | [info] Writing structure to /tmp/sbt-structure.xml...
2 | [info] Done.
3 |
--------------------------------------------------------------------------------
/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.11/input_dsp:
--------------------------------------------------------------------------------
1 | org.scala-lang  scala-library  2.11.8
--------------------------------------------------------------------------------
/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.11/output_dsp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/streams/$global/dependencyPositions/$global/streams/update_cache_2.11/output_dsp
--------------------------------------------------------------------------------
/target/streams/$global/ivyConfiguration/$global/streams/out:
--------------------------------------------------------------------------------
1 | [debug] Other repositories:
2 | [debug] Default repositories:
3 | [debug] Using inline dependencies specified in Scala.
4 | [debug] Using inline dependencies specified in Scala.
5 |
--------------------------------------------------------------------------------
/target/streams/$global/ivySbt/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/streams/$global/ivySbt/$global/streams/out
--------------------------------------------------------------------------------
/target/streams/$global/projectDescriptors/$global/streams/out:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/streams/$global/projectDescriptors/$global/streams/out
--------------------------------------------------------------------------------
/target/streams/$global/update/$global/streams/update_cache_2.11/inputs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/streams/$global/update/$global/streams/update_cache_2.11/inputs
--------------------------------------------------------------------------------
/target/streams/$global/update/$global/streams/update_cache_2.11/output:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/linzhouzhi/spark_recommend/ac3aa70de230441f05ad874dc522a3d596b72bfd/target/streams/$global/update/$global/streams/update_cache_2.11/output
--------------------------------------------------------------------------------
/target/streams/compile/externalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/compile/managedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/compile/unmanagedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/target/streams/compile/unmanagedJars/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/target/streams/runtime/externalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/runtime/managedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/runtime/unmanagedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/target/streams/runtime/unmanagedJars/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/target/streams/test/externalDependencyClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/test/managedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 | /home/lzz/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar
2 |
--------------------------------------------------------------------------------
/target/streams/test/unmanagedClasspath/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/target/streams/test/unmanagedJars/$global/streams/export:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------