├── manual-testing └── issues │ ├── 22 │ ├── 01_create-river.bat │ ├── 02_test-issue-22.bat │ ├── mongodb-river-script-filters.json │ └── test-issue-22.js │ ├── 26 │ ├── 01_create-river.bat │ ├── 02_test-issue-26.bat │ ├── mongodb-river-simple.json │ └── mongodb-river-simple-2.json │ ├── 42 │ ├── 01_create-river.bat │ ├── 02_test-issue-42.bat │ ├── mongodb-river-script-filters.json │ ├── test-issue-42-update-document.js │ └── test-issue-42-import-document.js │ ├── 46 │ ├── 01_create-river.bat │ ├── test-issue-42-update-document.js │ ├── test-issue-42-import-document.js │ ├── mongodb-river-sharded-collection.json │ └── 02_test-issue-42.bat │ ├── 54 │ ├── 02_create-river.bat │ ├── 01_create-index-with-mapping.bat │ ├── simple-replicaset │ │ ├── create-replicaset.bat │ │ ├── mongo-node-1.bat │ │ ├── mongo-node-2.bat │ │ ├── mongo-node-3.bat │ │ ├── start-replicaset.bat │ │ └── create-replicaset.js │ ├── 03_test-issue-54.bat │ ├── _04-add-london-document-in-index.json │ ├── _04-add-paris-document-in-index.json │ ├── _03-import-document.js │ ├── _04-add-nyc-document-in-index.json │ ├── _02_mongodb-river-simple.json │ ├── _03-geo-distance-query.json │ ├── 04_add-document-in-index.bat │ └── _01_create-index-with-mapping.json │ ├── 60 │ ├── mongo-credentials.txt │ ├── 01_create-river.bat │ ├── test-issue-60-update-document.js │ ├── test-issue-60-import-document.js │ ├── 02_test-issue-60.bat │ └── mongodb-river-sharded-collection.json │ ├── 61 │ ├── 01_create-river-with-admin-user.bat │ ├── test-issue-61.js │ ├── 05_create-river-with-local-user.bat │ ├── 03_create-river-with-local-and-mydb-user.bat │ ├── 02_test-issue-61.bat │ ├── 04_test-issue-61.bat │ ├── 06_test-issue-61.bat │ ├── secured-mongodb-river-simple.json │ ├── secured-mongodb-river-simple-local-user.json │ └── secured-mongodb-river-simple-local-mydb-user.json │ ├── 64 │ ├── 01_create-index-with-mapping.bat │ ├── simple-replicaset │ │ ├── create-replicaset.bat │ │ ├── mongo-node-1.bat │ │ ├── mongo-node-2.bat │ │ ├── mongo-node-3.bat │ │ ├── start-replicaset.bat │ │ └── create-replicaset.js │ ├── 03_test-issue-64.bat │ ├── 02_create-river.bat │ ├── _02_mongodb-river-author.json │ ├── _02_mongodb-river-book.json │ ├── _03-import-document.js │ ├── _03-has-child-query.json │ ├── _01_create-index-with-mapping.json │ └── $ES_HOME │ │ └── config │ │ └── templates │ │ └── template_1.json │ ├── 75 │ ├── disabled-mapping.json │ ├── 02_test-issue-75.bat │ ├── mongodb-river-simple.json │ ├── test-issue-75.js │ ├── 01_create-river.bat │ └── custom-mapping.json │ ├── 76 │ ├── 01_create-river.bat │ ├── 02_test-issue-76.bat │ ├── test-issue-76.js │ └── mongodb-river-simple.json │ ├── 79 │ ├── test-issue-79-2.js │ ├── 01_create-river.bat │ ├── test-issue-79-1.js │ ├── 02_test-issue-79.bat │ └── mongodb-river-simple.json │ ├── 85 │ ├── 01_create-index-with-mapping.bat │ ├── simple-replicaset │ │ ├── create-replicaset.bat │ │ ├── mongo-node-1.bat │ │ ├── mongo-node-2.bat │ │ ├── mongo-node-3.bat │ │ ├── start-replicaset.bat │ │ └── create-replicaset.js │ ├── 03_test-issue-85.bat │ ├── 02_create-river.bat │ ├── _02_mongodb-river-author.json │ ├── _02_mongodb-river-book.json │ ├── _03-import-document.js │ ├── _03-has-child-query.json │ ├── _01_create-index-with-mapping.json │ └── $ES_HOME │ │ └── config │ │ └── templates │ │ └── template_1.json │ ├── 87 │ ├── 01_create-river-groovy.bat │ ├── 02_test-issue-87.bat │ ├── test-import-document.js │ ├── 03_create-river-groovy-jodatime.bat │ ├── $ES_HOME │ │ └── config │ │ │ └── scripts │ │ │ ├── 
testscript.groovy │ │ │ └── testjodatime.groovy │ ├── mongodb-river-groovy.json │ └── mongodb-river-groovy-jodatime.json │ ├── 89 │ ├── 01_create-river.bat │ ├── $ES_HOME │ │ └── config │ │ │ └── scripts │ │ │ ├── modules │ │ │ └── helper.js │ │ │ └── testuuid.js │ ├── 02_test-issue-89.bat │ ├── test-import-document.js │ └── mongodb-river-script-filters.json │ ├── 90 │ ├── 01_create-river.bat │ ├── 02_test-issue-90.bat │ ├── test-import-document.js │ ├── $ES_HOME │ │ └── config │ │ │ └── scripts │ │ │ └── testdate.js │ └── mongodb-river-script-filters.json │ ├── 91 │ ├── 01_create-river.bat │ ├── test-document.pdf │ ├── test-issue-91.js │ ├── mongodb-river-with-gridfs.json │ └── 02_test-issue-91.bat │ ├── 95 │ ├── document-1.json │ ├── 01_create-river.bat │ ├── file.json │ ├── 02_test-issue-95.bat │ ├── test-chinese.json │ ├── test-issue-95.js │ ├── 01_create-river.sh │ ├── typechinese-1.json │ ├── 02_test-issue-95.sh │ ├── mongodb-simple-river.json │ ├── query-in-chinese.json │ ├── _query-in-chinese.json │ └── index-mapping.json │ ├── 97 │ ├── 01_create-river.bat │ ├── 02_test-issue-97.bat │ ├── mongodb-river-simple.json │ └── test-import-document.js │ ├── 101 │ ├── 01_create-river.bat │ ├── test-issue-101.js │ ├── 02_test-issue-101.bat │ └── mongodb-river-simple.json │ ├── 105 │ ├── drop-collection-105.js │ ├── disabled-mapping.json │ ├── test-issue-105.js │ ├── 01_create-river.bat │ ├── 02_test-issue-105.bat │ ├── mongodb-river-simple.json │ └── custom-mapping.json │ ├── 170 │ ├── 01_create-river.bat │ ├── 02_test-issue-170.bat │ ├── simple-mongodb-river.json │ └── test-issue-170.js │ ├── 175 │ ├── 01_create-river.bat │ ├── 02_test-issue-175.bat │ ├── simple-mongodb-river.json │ └── test-issue-175.js │ ├── 177 │ ├── 01_create-river.bat │ ├── 02_test-issue-177.bat │ ├── simple-mongodb-river-import-all-collections.json │ └── test-issue-177.js │ ├── 180 │ ├── 01_create-river.bat │ ├── 02_test-issue-180.bat │ ├── mongodb-river-script-filters.json │ ├── test-import-document-02.js │ └── test-import-document-01.js │ ├── 181 │ ├── 01_create-index-with-mapping.bat │ ├── simple-replicaset │ │ ├── create-replicaset.bat │ │ ├── mongo-node-1.bat │ │ ├── mongo-node-2.bat │ │ ├── mongo-node-3.bat │ │ ├── start-replicaset.bat │ │ └── create-replicaset.js │ ├── _02_mongodb-river-author.json │ ├── 03_test-issue-181.bat │ ├── 02_create-river.bat │ ├── _02_mongodb-river-book.json │ ├── _02_mongodb-river-chapter.json │ ├── _03-find-book-parent-query.json │ ├── _03-find-chapter-parent-query.json │ ├── _01_create-index-with-mapping.json │ ├── $ES_HOME │ │ └── config │ │ │ └── templates │ │ │ └── template_1.json │ └── _03-import-document.js │ ├── 190 │ ├── 02_test-issue-190.bat │ └── 02-map-reduce.js │ ├── 209 │ ├── 01_create-river.bat │ ├── 02_test-import-periocially-209.bat │ ├── 02_test-issue-209.bat │ ├── mongodb-river-simple.json │ ├── test-update-document.js │ └── test-import-document.js │ └── 211 │ ├── 01_create-river.bat │ ├── 02_test-issue-211.bat │ ├── test-update-document.js │ ├── test-import-document.js │ └── mongodb-river-simple.json ├── src ├── main │ ├── resources │ │ ├── es-plugin.properties │ │ └── org │ │ │ └── elasticsearch │ │ │ └── river │ │ │ └── mongodb │ │ │ └── es-build.properties │ ├── java │ │ └── org │ │ │ └── elasticsearch │ │ │ ├── river │ │ │ └── mongodb │ │ │ │ ├── NodeLevelModule.java │ │ │ │ ├── MongoDBRiverComponent.java │ │ │ │ ├── Status.java │ │ │ │ ├── SharedContext.java │ │ │ │ ├── Operation.java │ │ │ │ ├── MongoConfig.java │ │ │ │ ├── MongoDBRiverModule.java │ │ │ │ 
├── StatusChecker.java │ │ │ │ ├── util │ │ │ │ └── MongoDBRiverHelper.java │ │ │ │ └── rest │ │ │ │ ├── XContentThrowableRestResponse.java │ │ │ │ └── action │ │ │ │ └── support │ │ │ │ └── RestXContentBuilder.java │ │ │ └── plugin │ │ │ └── river │ │ │ └── mongodb │ │ │ └── MongoDBRiverPlugin.java │ └── assemblies │ │ └── plugin.xml ├── site │ ├── images │ │ └── favicon.ico │ ├── scripts │ │ └── app.js │ └── index.html └── test │ ├── java │ └── org │ │ └── elasticsearch │ │ └── river │ │ └── mongodb │ │ ├── script │ │ ├── test-simple-mongodb-document.json │ │ ├── books-mapping.json │ │ ├── test-mongodb-author-document.json │ │ ├── test-mongodb-book1-document.json │ │ ├── test-mongodb-book2-document.json │ │ ├── authors-mapping.json │ │ ├── parent-child-mapping.json │ │ ├── query-books.json │ │ └── test-mongodb-river-with-script.json │ │ ├── advanced │ │ ├── tweets-mapping.json │ │ ├── test-mongodb-river-with-advanced-transformation.json │ │ └── RiverMongoAdvancedTransformationGroovyScriptTest.java │ │ ├── simple │ │ ├── test-simple-mongodb-document.json │ │ ├── test-simple-mongodb-river.json │ │ ├── test-simple-mongodb-river-import-all-collections.json │ │ ├── test-simple-mongodb-river-store-statistics.json │ │ ├── test-simple-mongodb-river-collection-filter.json │ │ ├── test-simple-mongodb-river-skip-initial-import.json │ │ ├── test-simple-mongodb-document-with-dbref.json │ │ ├── test-simple-mongodb-river-with-type.json │ │ ├── test-simple-mongodb-river-drop-collection.json │ │ ├── test-simple-mongodb-river-exclude-fields.json │ │ ├── test-simple-mongodb-river-include-fields.json │ │ ├── test-simple-mongodb-river-include-collection.json │ │ ├── test-simple-mongodb-river-initial-timestamp.json │ │ ├── RiverMongoDropCollectionDisabledTest.java │ │ ├── RiverMongoDBGroovyTest.groovy │ │ ├── RiverMongoIndexExistsTest.java │ │ ├── RiverMongoIncludeCollectionTest.java │ │ ├── RiverMongoDbRefTest.java │ │ ├── RiverMongoCannotStopTest.java │ │ ├── RiverMongoImportAllCollectionsTest.java │ │ ├── RiverMongoStoreStatisticsTest.java │ │ ├── RiverMongoCollectionFilterTest.java │ │ ├── RiverMongoExcludeFieldsTest.java │ │ └── RiverMongoIncludeFieldsTest.java │ │ ├── gridfs │ │ ├── lorem.pdf │ │ ├── test-document.pdf │ │ ├── RiverMongoGridFSTestAbstract.java │ │ ├── test-gridfs-mongodb-river.json │ │ └── test-attachment.html │ │ ├── test-mongodb-river-simple-definition.json │ │ ├── test-exclude-fields-document.json │ │ ├── test-mongodb-river-definition-177.json │ │ ├── test-mongodb-river-definition-store-statistics.json │ │ ├── test-mongodb-river-with-authentication.json │ │ ├── embed │ │ ├── TokuRuntimeConfigBuilder.java │ │ ├── TokuMongodExecutable.java │ │ ├── TokuArtifactStoreBuilder.java │ │ ├── TokuMXStarter.java │ │ ├── TokuDownloadConfigBuilder.java │ │ ├── TokuMongodProcess.java │ │ └── TokuPaths.java │ │ ├── test-mongodb-river-definition-307.json │ │ ├── test-mongodb-river-definition-167.json │ │ ├── test-mongodb-river-definition-159.json │ │ ├── tokumx │ │ ├── RiverTokuMXTestAbstract.java │ │ └── RiverTokuMXOplogRefTest.java │ │ ├── test-mongodb-river-definition.json │ │ ├── test-mongodb-river-new-definition.json │ │ ├── test-include-fields-document-204.json │ │ └── ExcludeFieldsTest.java │ ├── scripts │ └── advanced-transformation-groovy-script.groovy │ └── resources │ ├── log4j.properties │ └── settings.yml ├── .settings ├── org.eclipse.jdt.groovy.core.prefs ├── org.eclipse.m2e.core.prefs └── org.eclipse.core.resources.prefs ├── rebuild.bat ├── readme.txt ├── install-local.sh ├── 
.gitignore └── test-attachment.txt /manual-testing/issues/95/document-1.json: -------------------------------------------------------------------------------- 1 | {content:"美国留给伊拉克的是个烂摊子吗"} -------------------------------------------------------------------------------- /manual-testing/issues/60/mongo-credentials.txt: -------------------------------------------------------------------------------- 1 | mongos - admin - admin - admin -------------------------------------------------------------------------------- /manual-testing/issues/105/drop-collection-105.js: -------------------------------------------------------------------------------- 1 | use mydb105 2 | db.document.drop() -------------------------------------------------------------------------------- /manual-testing/issues/105/disabled-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "index.mapper.dynamic": false 3 | } -------------------------------------------------------------------------------- /manual-testing/issues/75/disabled-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "index.mapper.dynamic": false 3 | } -------------------------------------------------------------------------------- /src/main/resources/es-plugin.properties: -------------------------------------------------------------------------------- 1 | plugin=org.elasticsearch.plugin.river.mongodb.MongoDBRiverPlugin -------------------------------------------------------------------------------- /.settings/org.eclipse.jdt.groovy.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | groovy.compiler.level=20 3 | -------------------------------------------------------------------------------- /manual-testing/issues/79/test-issue-79-2.js: -------------------------------------------------------------------------------- 1 | use mydb79 2 | var o = { 3 | 'name': 'richard' 4 | } 5 | db.mycollec79.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/101/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/river101/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/101/test-issue-101.js: -------------------------------------------------------------------------------- 1 | use mydb101 2 | var o = { 3 | 'name': 'issue101' 4 | } 5 | db.mycollec101.save(o) 6 | -------------------------------------------------------------------------------- /manual-testing/issues/170/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb170/_meta" -d @simple-mongodb-river.json -------------------------------------------------------------------------------- /manual-testing/issues/175/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb175/_meta" -d @simple-mongodb-river.json -------------------------------------------------------------------------------- /manual-testing/issues/209/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb209/_meta" -d @mongodb-river-simple.json 
-------------------------------------------------------------------------------- /manual-testing/issues/211/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb211/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/26/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb1/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/76/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/river76/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/79/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/river79/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/95/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/river95/_meta" -d @mongodb-simple-river.json -------------------------------------------------------------------------------- /manual-testing/issues/97/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb97/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /src/site/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/richardwilly98/elasticsearch-river-mongodb/HEAD/src/site/images/favicon.ico -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/test-simple-mongodb-document.json: -------------------------------------------------------------------------------- 1 | { 2 | name: "Richard", 3 | score: 100 4 | } -------------------------------------------------------------------------------- /manual-testing/issues/22/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb1/_meta" -d @mongodb-river-script-filters.json -------------------------------------------------------------------------------- /manual-testing/issues/54/02_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb54/_meta" -d @_02_mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/64/01_create-index-with-mapping.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/authors" -d @_01_create-index-with-mapping.json -------------------------------------------------------------------------------- /manual-testing/issues/85/01_create-index-with-mapping.bat: -------------------------------------------------------------------------------- 1 | curl 
-XPUT "http://localhost:9200/authors" -d @_01_create-index-with-mapping.json -------------------------------------------------------------------------------- /manual-testing/issues/87/01_create-river-groovy.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb87/_meta" -d @mongodb-river-groovy.json -------------------------------------------------------------------------------- /manual-testing/issues/91/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/river91/_meta" -d @mongodb-river-with-gridfs.json -------------------------------------------------------------------------------- /manual-testing/issues/180/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb180/_meta" -d @mongodb-river-script-filters.json -------------------------------------------------------------------------------- /manual-testing/issues/181/01_create-index-with-mapping.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/authors" -d @_01_create-index-with-mapping.json -------------------------------------------------------------------------------- /manual-testing/issues/42/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb42/_meta" -d @mongodb-river-script-filters.json -------------------------------------------------------------------------------- /manual-testing/issues/46/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb46/_meta" -d @mongodb-river-sharded-collection.json -------------------------------------------------------------------------------- /manual-testing/issues/54/01_create-index-with-mapping.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/mygeoindex" -d @_01_create-index-with-mapping.json -------------------------------------------------------------------------------- /manual-testing/issues/60/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb46/_meta" -d @mongodb-river-sharded-collection.json -------------------------------------------------------------------------------- /manual-testing/issues/89/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb89/_meta" -d @mongodb-river-script-filters.json -------------------------------------------------------------------------------- /manual-testing/issues/90/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb90/_meta" -d @mongodb-river-script-filters.json -------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/create-replicaset.bat: -------------------------------------------------------------------------------- 1 | "%MONGO_HOME%\bin\mongo.exe" --host localhost --port 27017 --shell create-replicaset.js 
-------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/create-replicaset.bat: -------------------------------------------------------------------------------- 1 | "%MONGO_HOME%\bin\mongo.exe" --host localhost --port 27017 --shell create-replicaset.js -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/create-replicaset.bat: -------------------------------------------------------------------------------- 1 | "%MONGO_HOME%\bin\mongo.exe" --host localhost --port 27017 --shell create-replicaset.js -------------------------------------------------------------------------------- /manual-testing/issues/79/test-issue-79-1.js: -------------------------------------------------------------------------------- 1 | use mydb79 2 | var o = { 3 | 'name': 'issue79' 4 | } 5 | 6 | db.mycollec79.save(o) 7 | db.mycollec79.drop() -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/create-replicaset.bat: -------------------------------------------------------------------------------- 1 | "%MONGO_HOME%\bin\mongo.exe" --host localhost --port 27017 --shell create-replicaset.js -------------------------------------------------------------------------------- /manual-testing/issues/95/file.json: -------------------------------------------------------------------------------- 1 | {"_index":"index","_type":"fulltext","_id":"1","_version":1,"exists":true, "_source" : {content:"美国留给伊拉克的是个烂摊子吗"}} -------------------------------------------------------------------------------- /src/main/resources/org/elasticsearch/river/mongodb/es-build.properties: -------------------------------------------------------------------------------- 1 | version=${project.version} 2 | hash=${buildNumber} 3 | timestamp=${timestamp} 4 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/books-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "book": { 3 | "_parent": { 4 | "type": "author" 5 | } 6 | } 7 | } -------------------------------------------------------------------------------- /manual-testing/issues/177/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb177/_meta" -d @simple-mongodb-river-import-all-collections.json -------------------------------------------------------------------------------- /manual-testing/issues/26/02_test-issue-26.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-26.js 2 | pause 3 | curl -XGET localhost:9200/mydb/_search?q=enrollment:390 -------------------------------------------------------------------------------- /manual-testing/issues/61/01_create-river-with-admin-user.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb1/_meta" -d @secured-mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/61/test-issue-61.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var o = 3 | { 4 | "firstName": "John-61", 5 | "lastName": "Doe-61" 6 | } 7 | 8 | db.mycollec.save(o) 
-------------------------------------------------------------------------------- /manual-testing/issues/76/02_test-issue-76.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-76.js 2 | pause 3 | curl -XGET localhost:9200/mydb76/_search?q=enrollment:390 -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/advanced/tweets-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "tweet": { 3 | "_parent": { 4 | "type": "author" 5 | } 6 | } 7 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-document.json: -------------------------------------------------------------------------------- 1 | { 2 | name: "Richard", 3 | chinese: "中国菜很好吃。", 4 | score: 100 5 | } -------------------------------------------------------------------------------- /manual-testing/issues/91/test-document.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/richardwilly98/elasticsearch-river-mongodb/HEAD/manual-testing/issues/91/test-document.pdf -------------------------------------------------------------------------------- /manual-testing/issues/22/02_test-issue-22.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-22.js 2 | pause 3 | curl -XGET localhost:9200/mydb/_search?q=title:Developer 4 | pause -------------------------------------------------------------------------------- /manual-testing/issues/61/05_create-river-with-local-user.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb1/_meta" -d @secured-mongodb-river-simple-local-user.json -------------------------------------------------------------------------------- /manual-testing/issues/89/$ES_HOME/config/scripts/modules/helper.js: -------------------------------------------------------------------------------- 1 | // export function 2 | exports.concatString = function(a, b) { 3 | return a + ' ' + b; 4 | }; 5 | -------------------------------------------------------------------------------- /manual-testing/issues/95/02_test-issue-95.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo mydb95 < test-issue-95.js 2 | pause 3 | curl -XGET localhost:9200/mydb95/_search?q=content:test91 -------------------------------------------------------------------------------- /manual-testing/issues/95/test-chinese.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "中华料理 Chinese Cuisine", 3 | "tags": ["中国", "食物", "Chinese", "food"], 4 | "content": "中国菜很好吃。" 5 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/test-mongodb-author-document.json: -------------------------------------------------------------------------------- 1 | { 2 | "_id": "herge", 3 | "name": "Herge", 4 | "nationality": "Belge" 5 | } -------------------------------------------------------------------------------- /.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | #Tue Jan 17 08:08:21 EST 2012 2 | activeProfiles= 3 | 
eclipse.preferences.version=1 4 | resolveWorkspaceProjects=true 5 | version=1 6 | -------------------------------------------------------------------------------- /manual-testing/issues/101/02_test-issue-101.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-101.js 2 | pause 3 | curl -XGET localhost:9200/mydb101/_search?q=collection:mycollec101 -------------------------------------------------------------------------------- /manual-testing/issues/75/02_test-issue-75.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-75.js 2 | pause 3 | curl -XGET localhost:9200/index75/_search?pretty=true&q=firstName:John -------------------------------------------------------------------------------- /manual-testing/issues/91/test-issue-91.js: -------------------------------------------------------------------------------- 1 | use mydb91 2 | var doc = db.fs.files.findOne() 3 | db.fs.files.update({"_id": doc._id}, {$set: {"metadata.titleDoc":"test91"}}) 4 | -------------------------------------------------------------------------------- /manual-testing/issues/170/02_test-issue-170.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-170.js 2 | pause 3 | curl -XGET localhost:9200/maindb/venue/_search?q=title:Developer 4 | pause -------------------------------------------------------------------------------- /manual-testing/issues/175/02_test-issue-175.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-175.js 2 | pause 3 | curl -XGET localhost:9200/maindb/venue/_search?q=title:Developer 4 | pause -------------------------------------------------------------------------------- /manual-testing/issues/61/03_create-river-with-local-and-mydb-user.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb1/_meta" -d @secured-mongodb-river-simple-local-mydb-user.json -------------------------------------------------------------------------------- /manual-testing/issues/76/test-issue-76.js: -------------------------------------------------------------------------------- 1 | use mydb76 2 | var o = 3 | { 4 | "exclude-1": "xxx", 5 | "exclude-2": "yyy", 6 | "enrollment": "390" 7 | } 8 | 9 | db.mycollec76.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/87/02_test-issue-87.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb-87/_search?q=firstName:John 4 | pause 5 | -------------------------------------------------------------------------------- /manual-testing/issues/89/02_test-issue-89.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb-89/_search?q=firstName:John 4 | pause 5 | -------------------------------------------------------------------------------- /manual-testing/issues/90/02_test-issue-90.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb-90/_search?q=firstName:John 4 | pause 5 | 
-------------------------------------------------------------------------------- /manual-testing/issues/61/02_test-issue-61.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo admin -u admin -p admin < test-issue-61.js 2 | pause 3 | curl -XGET localhost:9200/mydb/_search?q=firstName:John-61 4 | -------------------------------------------------------------------------------- /manual-testing/issues/95/test-issue-95.js: -------------------------------------------------------------------------------- 1 | db.mycollection95.save({ 2 | "title": "中华料理 Chinese Cuisine", 3 | "tags": ["中国", "食物", "Chinese", "food"], 4 | "content": "中国菜很好吃。" 5 | }); -------------------------------------------------------------------------------- /manual-testing/issues/46/test-issue-42-update-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var o = db.mycollec.findOne({"firstName": "John42", "lastName": "Doe42"}) 3 | o.state = 'CLOSED'; 4 | db.mycollec.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/60/test-issue-60-update-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var o = db.mycollec.findOne({"firstName": "John42", "lastName": "Doe42"}) 3 | o.state = 'CLOSED'; 4 | db.mycollec.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/87/test-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb87 2 | var o = 3 | { 4 | "firstName": "John", 5 | "lastName": "Doe", 6 | "created": new Date() 7 | } 8 | 9 | db.mycollec87.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/90/test-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb90 2 | var o = 3 | { 4 | "firstName": "John", 5 | "lastName": "Doe", 6 | "created": new Date() 7 | } 8 | 9 | db.mycollec90.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/97/02_test-issue-97.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET "localhost:9200/mydb-97/_search?pretty=true&q=scores:89" 4 | pause 5 | -------------------------------------------------------------------------------- /manual-testing/issues/90/$ES_HOME/config/scripts/testdate.js: -------------------------------------------------------------------------------- 1 | ctx.document.modified = ctx.document.created; 2 | ctx.document.created = ctx.document.created.getTime(); 3 | ctx.document.flag = true; -------------------------------------------------------------------------------- /manual-testing/issues/61/04_test-issue-61.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo --port 27018 local -u local -p local < test-issue-61.js 2 | pause 3 | curl -XGET localhost:9200/mydb61/_search?q=firstName:John-61 4 | -------------------------------------------------------------------------------- /manual-testing/issues/61/06_test-issue-61.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo --port 27018 local -u local -p local < test-issue-61.js 2 | pause 3 | curl -XGET 
localhost:9200/mydb61/_search?q=firstName:John-61 4 | -------------------------------------------------------------------------------- /manual-testing/issues/87/03_create-river-groovy-jodatime.bat: -------------------------------------------------------------------------------- 1 | REM Copy joda-time-x-y.jar in $ES_HOME\lib 2 | curl -XPUT "http://localhost:9200/_river/mongodb87/_meta" -d @mongodb-river-groovy-jodatime.json -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/gridfs/lorem.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/richardwilly98/elasticsearch-river-mongodb/HEAD/src/test/java/org/elasticsearch/river/mongodb/gridfs/lorem.pdf -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/test-mongodb-book1-document.json: -------------------------------------------------------------------------------- 1 | { 2 | "_parentId": "herge", 3 | "name": "Titin au Congo", 4 | "genre": "Bande dessinee", 5 | "publisher": "Herge" 6 | } -------------------------------------------------------------------------------- /manual-testing/issues/54/03_test-issue-54.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < _03-import-document.js 2 | pause 3 | curl -XPOST "localhost:9200/mygeoindex/_search?pretty=true" -d @_03-geo-distance-query.json 4 | pause 5 | -------------------------------------------------------------------------------- /manual-testing/issues/64/03_test-issue-64.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < _03-import-document.js 2 | pause 3 | curl -XPOST "localhost:9200/authors/author/_search?pretty=true" -d @_03-has-child-query.json 4 | pause 5 | -------------------------------------------------------------------------------- /manual-testing/issues/79/02_test-issue-79.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-79-1.js 2 | pause 3 | %MONGO_HOME%\bin\mongo < test-issue-79-2.js 4 | pause 5 | curl -XGET localhost:9200/mydb79/_search?q=name:richard -------------------------------------------------------------------------------- /manual-testing/issues/85/03_test-issue-85.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < _03-import-document.js 2 | pause 3 | curl -XPOST "localhost:9200/authors/author/_search?pretty=true" -d @_03-has-child-query.json 4 | pause 5 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/gridfs/test-document.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/richardwilly98/elasticsearch-river-mongodb/HEAD/src/test/java/org/elasticsearch/river/mongodb/gridfs/test-document.pdf -------------------------------------------------------------------------------- /manual-testing/issues/46/test-issue-42-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var o = 3 | { 4 | "firstName": "John42", 5 | "lastName": "Doe42", 6 | "age": 34, 7 | "state": "OPENED" 8 | } 9 | 10 | db.mycollec.save(o) -------------------------------------------------------------------------------- 
/manual-testing/issues/60/test-issue-60-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var o = 3 | { 4 | "firstName": "John42", 5 | "lastName": "Doe42", 6 | "age": 34, 7 | "state": "OPENED" 8 | } 9 | 10 | db.mycollec.save(o) -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/test-mongodb-book2-document.json: -------------------------------------------------------------------------------- 1 | { 2 | "_parentId": "herge", 3 | "name": "Les bijoux de la Castafiore", 4 | "genre": "Bande dessinee", 5 | "publisher": "Herge" 6 | } -------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/mongo-node-1.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db1" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27017 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/mongo-node-2.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db2" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27018 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/mongo-node-3.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db3" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27019 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/54/_04-add-london-document-in-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "pin" : { 3 | "city" : "London", 4 | "location" : { 5 | "lat" : 51.5171, 6 | "lon" : 0.1062 7 | } 8 | } 9 | } -------------------------------------------------------------------------------- /manual-testing/issues/54/_04-add-paris-document-in-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "pin" : { 3 | "city" : "Paris", 4 | "location" : { 5 | "lat" : 48.8742, 6 | "lon" : 2.3470 7 | } 8 | } 9 | } -------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/mongo-node-1.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db1" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27017 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/mongo-node-2.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db2" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27018 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/mongo-node-3.bat: 
-------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db3" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27019 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/mongo-node-1.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db1" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27017 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/mongo-node-2.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db2" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27018 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/mongo-node-3.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db3" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27019 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/mongo-node-1.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db1" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27017 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/mongo-node-2.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db2" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27018 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/mongo-node-3.bat: -------------------------------------------------------------------------------- 1 | SET DB_PATH="D:\data\mongodb\rep1-db3" 2 | mkdir %DB_PATH% 3 | "%MONGO_HOME%\bin\mongod.exe" --replSet replica1 --port 27019 --dbpath %DB_PATH% --rest --oplogSize 100 -------------------------------------------------------------------------------- /manual-testing/issues/95/01_create-river.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | 5 | curl -XPUT "http://localhost:9200/_river/river95/_meta" -d @"${DIR}/mongodb-simple-river.json" 6 | -------------------------------------------------------------------------------- /manual-testing/issues/177/02_test-issue-177.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-177.js 2 | pause 3 | curl -XGET localhost:9200/mydb177/mycollect1/_count 4 | pause 5 | curl -XGET localhost:9200/mydb177/mycollect2/_count 6 | pause -------------------------------------------------------------------------------- 
/manual-testing/issues/209/02_test-import-periocially-209.bat: -------------------------------------------------------------------------------- 1 | echo. 2 | :LOOP 3 | echo. 4 | %MONGO_HOME%\bin\mongo < test-import-document.js 5 | echo Waiting For 5 minutes... 6 | TIMEOUT /T 180 /NOBREAK 7 | echo. 8 | GOTO LOOP 9 | -------------------------------------------------------------------------------- /manual-testing/issues/54/_03-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var c = 3 | { 4 | "city": "Saint Malo", 5 | "location" : { 6 | "lat" : 48.6333, 7 | "lon" : 2 8 | } 9 | } 10 | 11 | db.mygeocollec54.save(c) -------------------------------------------------------------------------------- /manual-testing/issues/54/_04-add-nyc-document-in-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "pin" : { 3 | "city" : "New York City", 4 | "location" : { 5 | "lat" : 40.7142, 6 | "lon" : 74.0064 7 | } 8 | } 9 | } -------------------------------------------------------------------------------- /manual-testing/issues/64/02_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/authors54/_meta" -d @_02_mongodb-river-author.json 2 | PAUSE 3 | curl -XPUT "http://localhost:9200/_river/books54/_meta" -d @_02_mongodb-river-book.json -------------------------------------------------------------------------------- /manual-testing/issues/85/02_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/authors85/_meta" -d @_02_mongodb-river-author.json 2 | PAUSE 3 | curl -XPUT "http://localhost:9200/_river/books85/_meta" -d @_02_mongodb-river-book.json -------------------------------------------------------------------------------- /manual-testing/issues/26/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "mycollec" 6 | }, 7 | "index": { 8 | "name": "mydb", 9 | "type": "mycollec" 10 | } 11 | } -------------------------------------------------------------------------------- /manual-testing/issues/64/_02_mongodb-river-author.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "authors" 6 | }, 7 | "index": { 8 | "name": "authors", 9 | "type": "author" 10 | } 11 | } -------------------------------------------------------------------------------- /manual-testing/issues/95/typechinese-1.json: -------------------------------------------------------------------------------- 1 | {"_index":"testindex","_type":"typechinese","_id":"1","_version":1,"exists":true, "_source" : { "title": "中华料理 Chinese Cuisine", "tags": ["中国", "食物", "Chinese", "food"], "content": "中国菜很好吃。"}} -------------------------------------------------------------------------------- /manual-testing/issues/181/_02_mongodb-river-author.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb181", 5 | "collection": "authors" 6 | }, 7 | "index": { 8 | "name": "authors", 9 | "type": "author" 10 | } 11 | } -------------------------------------------------------------------------------- 
/manual-testing/issues/75/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb75", 5 | "collection": "document" 6 | }, 7 | "index": { 8 | "name": "index75", 9 | "type": "document" 10 | } 11 | } -------------------------------------------------------------------------------- /manual-testing/issues/75/test-issue-75.js: -------------------------------------------------------------------------------- 1 | use mydb75 2 | var o = 3 | { 4 | "firstName": "John", 5 | "lastName": "Doe", 6 | "nickName": "The Boss", 7 | "creationDate": "2013-06-11T12:00:00.0000-04:00" 8 | } 9 | 10 | db.document.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/85/_02_mongodb-river-author.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb85", 5 | "collection": "authors" 6 | }, 7 | "index": { 8 | "name": "authors", 9 | "type": "author" 10 | } 11 | } -------------------------------------------------------------------------------- /.settings/org.eclipse.core.resources.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | encoding//src/main/java=UTF-8 3 | encoding//src/main/resources=UTF-8 4 | encoding//src/test/java=UTF-8 5 | encoding//src/test/resources=UTF-8 6 | encoding/=UTF-8 7 | -------------------------------------------------------------------------------- /manual-testing/issues/105/test-issue-105.js: -------------------------------------------------------------------------------- 1 | use mydb105 2 | var o = 3 | { 4 | "firstName": "John", 5 | "lastName": "Doe", 6 | "nickName": "The Boss", 7 | "creationDate": "2013-06-11T12:00:00.0000-04:00" 8 | } 9 | 10 | db.document.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/54/_02_mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "mygeocollec54" 6 | }, 7 | "index": { 8 | "name": "mygeoindex", 9 | "type": "pin" 10 | } 11 | } -------------------------------------------------------------------------------- /manual-testing/issues/95/02_test-issue-95.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 4 | 5 | mongo mydb95 < "${DIR}/test-issue-95.js" 6 | sleep 1 7 | curl -XGET localhost:9200/mydb95/_search?q=content:test91 8 | -------------------------------------------------------------------------------- /manual-testing/issues/97/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb97", 5 | "collection": "mycollec97" 6 | }, 7 | "index": { 8 | "name": "mydb-97", 9 | "type": "mycollec-97" 10 | } 11 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/authors-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": { 3 | "properties": { 4 | "name": { 5 | "type": "string" 6 | }, 7 | "nationality": { 8 | "type": "string" 9 | } 10 | } 11 | } 12 | } 
-------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/start-replicaset.bat: -------------------------------------------------------------------------------- 1 | start "replica node #1" mongo-node-1.bat 2 | start "replica node #2" mongo-node-2.bat 3 | start "replica node #3" mongo-node-3.bat 4 | sleep 10 5 | REM start "mongos - port 37017" mongos-node-0.bat -------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/start-replicaset.bat: -------------------------------------------------------------------------------- 1 | start "replica node #1" mongo-node-1.bat 2 | start "replica node #2" mongo-node-2.bat 3 | start "replica node #3" mongo-node-3.bat 4 | sleep 10 5 | REM start "mongos - port 37017" mongos-node-0.bat -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/start-replicaset.bat: -------------------------------------------------------------------------------- 1 | start "replica node #1" mongo-node-1.bat 2 | start "replica node #2" mongo-node-2.bat 3 | start "replica node #3" mongo-node-3.bat 4 | sleep 10 5 | REM start "mongos - port 37017" mongos-node-0.bat -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/start-replicaset.bat: -------------------------------------------------------------------------------- 1 | start "replica node #1" mongo-node-1.bat 2 | start "replica node #2" mongo-node-2.bat 3 | start "replica node #3" mongo-node-3.bat 4 | sleep 10 5 | REM start "mongos - port 37017" mongos-node-0.bat -------------------------------------------------------------------------------- /manual-testing/issues/89/test-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb89 2 | var o = 3 | { 4 | "firstName": "John", 5 | "lastName": "Doe", 6 | "created": new Date(), 7 | "customerId": new BinData(3, "8Fq3Wd+BGUGD2CbsA4wasg==") 8 | } 9 | 10 | db.mycollec89.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/91/mongodb-river-with-gridfs.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb91", 5 | "collection": "fs", 6 | gridfs: "true" 7 | }, 8 | "index": { 9 | "name": "mydb91", 10 | "type": "type91" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/87/$ES_HOME/config/scripts/testscript.groovy: -------------------------------------------------------------------------------- 1 | logger.debug("Incoming document: {}", ctx.document) 2 | def now = new Date() 3 | println "Now: ${now}" 4 | ctx.document.modified = now.clearTime() 5 | logger.debug("Outgoing document: {}", ctx.document) -------------------------------------------------------------------------------- /manual-testing/issues/95/mongodb-simple-river.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb95", 5 | "collection": "mycollection95", 6 | gridfs: "false" 7 | }, 8 | "index": { 9 | "name": "mydb95", 10 | "type": "type95" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/175/simple-mongodb-river.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "maindb", 5 | "collection": "venue" 6 | }, 7 | "index": { 8 | "name": "maindb", 9 | "type": "venue" 10 | } 11 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-simple-definition.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "mycollection", 6 | gridfs: false 7 | }, 8 | index: { 9 | name: "myindex" 10 | } 11 | } -------------------------------------------------------------------------------- /manual-testing/issues/89/mongodb-river-script-filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb89", 5 | "collection": "mycollec89", 6 | "script": "testuuid" 7 | }, 8 | "index": { 9 | "name": "mydb-89", 10 | "type": "mycollec-89" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/90/mongodb-river-script-filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb90", 5 | "collection": "mycollec90", 6 | "script": "testdate" 7 | }, 8 | "index": { 9 | "name": "mydb-90", 10 | "type": "mycollec-90" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/75/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/index75" -d @disabled-mapping.json 2 | curl -XPUT "http://localhost:9200/index75/document/_mapping" -d @custom-mapping.json 3 | curl -XPUT "http://localhost:9200/_river/river75/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/105/01_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/mydb105" -d @disabled-mapping.json 2 | curl -XPUT "http://localhost:9200/mydb105/document/_mapping" -d @custom-mapping.json 3 | pause 4 | curl -XPUT "http://localhost:9200/_river/river105/_meta" -d @mongodb-river-simple.json -------------------------------------------------------------------------------- /manual-testing/issues/54/_03-geo-distance-query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "match_all": {} 4 | }, 5 | "filter": { 6 | "geo_distance": { 7 | "distance": "100km", 8 | "pin.location": { 9 | "lat": 48, 10 | "lon": 2 11 | } 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/91/02_test-issue-91.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongofiles --host localhost:27018 --db mydb91 --collection mycollec91 --type applicaton/pdf put test-document.pdf 2 | %MONGO_HOME%\bin\mongo < test-issue-91.js 3 | pause 4 | curl -XGET localhost:9200/mydb91/_search?q=metadata.titleDoc:test91 -------------------------------------------------------------------------------- /manual-testing/issues/105/02_test-issue-105.bat: 
-------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-105.js 2 | pause 3 | curl -XGET localhost:9200/mydb105/_search?pretty=true&q=firstName:John 4 | pause 5 | %MONGO_HOME%\bin\mongo < drop-collection-105.js 6 | pause 7 | curl -XGET localhost:9200/mydb105/_mapping?pretty=true -------------------------------------------------------------------------------- /manual-testing/issues/105/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb105", 5 | "collection": "document", 6 | "options": { 7 | "drop_collection": true 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb105", 12 | "type": "document" 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/46/mongodb-river-sharded-collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "host": "localhost", 5 | "port": "37017", 6 | "db": "mydb", 7 | "collection": "students" 8 | }, 9 | "index": { 10 | "name": "mydb46", 11 | "type": "students46" 12 | } 13 | } -------------------------------------------------------------------------------- /manual-testing/issues/79/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb79", 5 | "collection": "mycollec79", 6 | "options": { 7 | "drop_collection": true 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb79", 12 | "type": "type79" 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/87/mongodb-river-groovy.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb87", 5 | "collection": "mycollec87", 6 | "scriptType": "groovy", 7 | "script": "testscript" 8 | }, 9 | "index": { 10 | "name": "mydb-87", 11 | "type": "mycollec-87" 12 | } 13 | } -------------------------------------------------------------------------------- /manual-testing/issues/101/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb101", 5 | "collection": "mycollec101", 6 | "options": { 7 | "include_collection": "collection" 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb101", 12 | "type": "type101" 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/87/$ES_HOME/config/scripts/testjodatime.groovy: -------------------------------------------------------------------------------- 1 | import org.joda.time.DateTime 2 | logger.debug("Incoming document: {}", ctx.document) 3 | def today = new DateTime() 4 | println "Today: ${today}" 5 | ctx.document.monthOfYear = today.monthOfYear 6 | logger.debug("Outgoing document: {}", ctx.document) -------------------------------------------------------------------------------- /manual-testing/issues/87/mongodb-river-groovy-jodatime.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb87", 5 | "collection": "mycollec87", 6 | "scriptType": "groovy", 7 | "script": "testjodatime" 8 | }, 9 | "index": { 10 | "name": "mydb-87", 11 | "type": "mycollec-87" 12 | } 
13 | } -------------------------------------------------------------------------------- /manual-testing/issues/180/02_test-issue-180.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document-01.js 2 | pause 3 | curl -XGET localhost:9200/mydb180/_search?q=firstName:John 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-import-document-02.js 6 | pause 7 | curl -XGET localhost:9200/mydb180/_search?q=firstName:John 8 | pause 9 | -------------------------------------------------------------------------------- /manual-testing/issues/76/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb76", 5 | "collection": "mycollec76", 6 | "options": { 7 | "exclude_fields": ["exclude-1", "exclude-2"] 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb76", 12 | "type": "type76" 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/42/02_test-issue-42.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-42-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb42/_search?q=firstName:John42 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-issue-42-update-document.js 6 | pause 7 | curl -XGET localhost:9200/mydb42/_search?q=firstName:John42 8 | pause -------------------------------------------------------------------------------- /manual-testing/issues/60/02_test-issue-60.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-60-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb60/_search?q=firstName:John60 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-issue-60-update-document.js 6 | pause 7 | curl -XGET localhost:9200/mydb60/_search?q=firstName:John60 8 | pause -------------------------------------------------------------------------------- /manual-testing/issues/181/03_test-issue-181.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < _03-import-document.js 2 | pause 3 | curl -XPOST "localhost:9200/authors/author/_search?pretty=true" -d @_03-find-book-parent-query.json 4 | pause 5 | curl -XPOST "localhost:9200/authors/book/_search?pretty=true" -d @_03-find-chapter-parent-query.json 6 | pause 7 | -------------------------------------------------------------------------------- /manual-testing/issues/42/mongodb-river-script-filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb42", 5 | "collection": "mycollec42", 6 | "script": "if( ctx.document.state == 'CLOSED' ) { ctx.deleted = true; }" 7 | }, 8 | "index": { 9 | "name": "mydb42", 10 | "type": "mycollec42" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/46/02_test-issue-42.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-issue-42-import-document.js 2 | pause 3 | curl -XGET localhost:9200/mydb-42/_search?q=firstName:John42 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-issue-42-update-document.js 6 | pause 7 | curl -XGET localhost:9200/mydb-42/_search?q=firstName:John42 8 | pause -------------------------------------------------------------------------------- 
/manual-testing/issues/54/04_add-document-in-index.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/mygeoindex/pin/1" -d @_04-add-paris-document-in-index.json 2 | curl -XPUT "http://localhost:9200/mygeoindex/pin/2" -d @_04-add-nyc-document-in-index.json 3 | curl -XPUT "http://localhost:9200/mygeoindex/pin/3" -d @_04-add-london-document-in-index.json -------------------------------------------------------------------------------- /manual-testing/issues/180/mongodb-river-script-filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb180", 5 | "collection": "mycollec180", 6 | "script": "if( ctx.document.score == undefined ) { ctx.ignore = true; }" 7 | }, 8 | "index": { 9 | "name": "mydb180", 10 | "type": "mycollec180" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/190/02_test-issue-190.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < 02-map-reduce.js 2 | pause 3 | rem curl -XPOST "localhost:9200/authors/author/_search?pretty=true" -d @_03-find-book-parent-query.json 4 | pause 5 | rem curl -XPOST "localhost:9200/authors/book/_search?pretty=true" -d @_03-find-chapter-parent-query.json 6 | pause 7 | -------------------------------------------------------------------------------- /manual-testing/issues/209/02_test-issue-209.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET "localhost:9200/media/_search?pretty=true&q=user:joe.doe" 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-update-document.js 6 | pause 7 | curl -XGET "localhost:9200/media/_search?pretty=true&q=user:joe.doe" 8 | pause 9 | -------------------------------------------------------------------------------- /manual-testing/issues/211/02_test-issue-211.bat: -------------------------------------------------------------------------------- 1 | %MONGO_HOME%\bin\mongo < test-import-document.js 2 | pause 3 | curl -XGET "localhost:9200/media/_search?pretty=true&q=user:joe.doe" 4 | pause 5 | %MONGO_HOME%\bin\mongo < test-update-document.js 6 | pause 7 | curl -XGET "localhost:9200/media/_search?pretty=true&q=user:joe.doe" 8 | pause 9 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/parent-child-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": { 3 | "properties": { 4 | "name": { 5 | "type": "string" 6 | }, 7 | "nationality": { 8 | "type": "string" 9 | } 10 | } 11 | }, 12 | "book": { 13 | "_parent": { 14 | "type": "author" 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /manual-testing/issues/181/02_create-river.bat: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/authors181/_meta" -d @_02_mongodb-river-author.json 2 | PAUSE 3 | curl -XPUT "http://localhost:9200/_river/books181/_meta" -d @_02_mongodb-river-book.json 4 | PAUSE 5 | curl -XPUT "http://localhost:9200/_river/chapter181/_meta" -d @_02_mongodb-river-chapter.json 6 | -------------------------------------------------------------------------------- /rebuild.bat: 
-------------------------------------------------------------------------------- 1 | SET ES_HOME=C:\Dev\elasticsearch-0.90.5 2 | SET RIVER_VERSION=1.7.2-SNAPSHOT 3 | 4 | CALL mvn clean package -Dmaven.test.skip=true 5 | CALL %ES_HOME%\bin\plugin -r elasticsearch-river-mongodb 6 | CALL %ES_HOME%\bin\plugin -i elasticsearch-river-mongodb -u file:///%CD%/target/releases/elasticsearch-river-mongodb-%RIVER_VERSION%.zip 7 | -------------------------------------------------------------------------------- /manual-testing/issues/22/mongodb-river-script-filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "mycollec", 6 | "script": "ctx.document.title = ctx.document.title_from_mongo; delete ctx.document.title_from_mongo;" 7 | }, 8 | "index": { 9 | "name": "mydb", 10 | "type": "mycollec" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/54/_01_create-index-with-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings" : { 3 | "number_of_shards" : 1, 4 | "mapper" : { 5 | "dynamic": false 6 | } 7 | }, 8 | "mappings" : { 9 | "pin" : { 10 | "properties" : { 11 | "location" : { 12 | "type" : "geo_point" 13 | } 14 | } 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /manual-testing/issues/64/_02_mongodb-river-book.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb", 5 | "collection": "books", 6 | "script": "if(ctx.document._parentId) { ctx._parent = ctx.document._parentId; delete ctx.document._parentId;}" 7 | }, 8 | "index": { 9 | "name": "authors", 10 | "type": "book" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/85/_02_mongodb-river-book.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb85", 5 | "collection": "books", 6 | "script": "if(ctx.document._parentId) { ctx._parent = ctx.document._parentId; delete ctx.document._parentId;}" 7 | }, 8 | "index": { 9 | "name": "authors", 10 | "type": "book" 11 | } 12 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-exclude-fields-document.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName": "John", 3 | "lastName": "Doe", 4 | "address": { 5 | "street": "Main Street", 6 | "apartment": "1A", 7 | "state": "MA", 8 | "city": "Boston", 9 | "country": "USA" 10 | }, 11 | "hobbies": [ 12 | "movie", 13 | "soccer" 14 | ] 15 | } -------------------------------------------------------------------------------- /manual-testing/issues/180/test-import-document-02.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb180 9 | var o = db.mycollec180.findOne() 10 | o.score = 100; 11 | 12 | db.mycollec180.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/181/_02_mongodb-river-book.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb181", 5 | "collection": "books", 6 | "script": "if(ctx.document._parentId) { ctx._parent = ctx.document._parentId; delete ctx.document._parentId;}" 7 | }, 8 | "index": { 9 | "name": "authors", 10 | "type": "book" 11 | } 12 | } -------------------------------------------------------------------------------- /manual-testing/issues/177/simple-mongodb-river-import-all-collections.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb177", 5 | "options": { 6 | "drop_collection": true, 7 | "import_all_collections": true 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb177" 12 | } 13 | } -------------------------------------------------------------------------------- /manual-testing/issues/181/_02_mongodb-river-chapter.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb181", 5 | "collection": "chapters", 6 | "script": "if(ctx.document._parentId) { ctx._parent = ctx.document._parentId; delete ctx.document._parentId;}" 7 | }, 8 | "index": { 9 | "name": "authors", 10 | "type": "chapter" 11 | } 12 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/query-books.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query" : { 6 | "filtered": { 7 | "query": { "match_all": {}}, 8 | "filter" : { 9 | "and": [ 10 | {"term": {"publisher": "Herge"}} 11 | ] 12 | } 13 | } 14 | } 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /manual-testing/issues/170/simple-mongodb-river.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "db": "mydb170", 5 | "collection": "mycollection170", 6 | "options": { 7 | "store_statistics": true 8 | } 9 | }, 10 | "index": { 11 | "name": "mydb170", 12 | "type": "mycollection170" 13 | } 14 | } -------------------------------------------------------------------------------- /manual-testing/issues/209/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "mongodb": { 3 | "db": "mydb209", 4 | "options": { 5 | "secondaryread_preference": true 6 | }, 7 | "collection": "mycollec209" 8 | }, 9 | "type": "mongodb", 10 | "index": { 11 | "type": "mycollec209", 12 | "name": "mydb209", 13 | "throttle_size": 50000 14 | } 15 | } -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/NodeLevelModule.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import org.elasticsearch.common.inject.AbstractModule; 4 | 5 | public class NodeLevelModule extends AbstractModule { 6 | @Override 7 | protected void configure() { 8 | bind(MongoClientService.class).asEagerSingleton(); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /manual-testing/issues/209/test-update-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < 
status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | use mydb211 8 | var modified = new Date() 9 | db.media.update({"user": "joe.doe"}, {$set: {"deleted": true, "modified": modified}}) 10 | -------------------------------------------------------------------------------- /manual-testing/issues/211/test-update-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | use mydb211 8 | var modified = new Date() 9 | db.media.update({"user": "joe.doe"}, {$set: {"deleted": true, "modified": modified}}) 10 | -------------------------------------------------------------------------------- /manual-testing/issues/64/_03-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb 2 | var author = 3 | { 4 | "_id": "herge", 5 | "name": "Herge", 6 | "nationality": "Belge" 7 | } 8 | 9 | db.authors.save(author) 10 | 11 | var book = { 12 | "_parentId": "herge", 13 | "name": "Titin au Congo", 14 | "genre": "Bande dessinee", 15 | "publisher": "Herge" 16 | } 17 | 18 | db.books.save(book) 19 | -------------------------------------------------------------------------------- /src/test/scripts/advanced-transformation-groovy-script.groovy: -------------------------------------------------------------------------------- 1 | def local = ctx.documents[0] 2 | ctx.documents = [] 3 | def doc1 = [operation: local.operation, _type: 'author', data: local.data.clone()] 4 | doc1.data.remove('tweets') 5 | ctx.documents << doc1 6 | for(tweet in local.data.tweets) { 7 | ctx.documents << [operation: 'i', _parent: local.data._id, _type: 'tweet', data: tweet] 8 | } -------------------------------------------------------------------------------- /manual-testing/issues/42/test-issue-42-update-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb42 9 | var o = db.mycollec42.findOne({"firstName": "John42", "lastName": "Doe42"}) 10 | o.state = 'CLOSED'; 11 | db.mycollec42.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/60/mongodb-river-sharded-collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "host": "localhost", 5 | "port": "37017", 6 | "db": "mydb", 7 | "collection": "students", 8 | "credentials": 9 | [ 10 | { db: "admin", user: "admin", password: "admin" } 11 | ] 12 | }, 13 | "index": { 14 | "name": "mydb60", 15 | "type": "students60" 16 | } 17 | } -------------------------------------------------------------------------------- /manual-testing/issues/180/test-import-document-01.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb180 9 | var o = 10 | { 11 | "firstName": "John", 12 | "lastName": "Doe", 13 | "created": new Date() 14 | } 15 | 16 | 
db.mycollec180.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/97/test-import-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | use mydb97 8 | var student = {"name": "joe", "scores": [54]} 9 | db.mycollec97.save(student) 10 | db.mycollec97.update({"name": "joe"}, {$push: {"scores": 89}}) 11 | -------------------------------------------------------------------------------- /manual-testing/issues/105/custom-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "document": { 3 | "properties": { 4 | "firstName": { 5 | "type": "string" 6 | }, 7 | "lastName": { 8 | "type": "string" 9 | }, 10 | "nickName": { 11 | "type": "string", "index":"not_analyzed" 12 | }, 13 | "creationDate": { 14 | "type": "date", 15 | "format": "dateOptionalTime" 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/75/custom-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "document": { 3 | "properties": { 4 | "firstName": { 5 | "type": "string" 6 | }, 7 | "lastName": { 8 | "type": "string" 9 | }, 10 | "nickName": { 11 | "type": "string", "index":"not_analyzed" 12 | }, 13 | "creationDate": { 14 | "type": "date", 15 | "format": "dateOptionalTime" 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/85/_03-import-document.js: -------------------------------------------------------------------------------- 1 | use mydb85 2 | 3 | var author = 4 | { 5 | "name": "Herge", 6 | "nationality": "Belge" 7 | } 8 | 9 | db.authors.save(author) 10 | 11 | var author1 = db.authors.findOne() 12 | 13 | var book = { 14 | "_parentId": author1._id, 15 | "name": "Titin au Congo", 16 | "genre": "Bande dessinee", 17 | "publisher": "Herge" 18 | } 19 | 20 | db.books.save(book) -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/MongoDBRiverComponent.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import org.elasticsearch.river.AbstractRiverComponent; 4 | 5 | public abstract class MongoDBRiverComponent extends AbstractRiverComponent { 6 | 7 | protected MongoDBRiverComponent(MongoDBRiver river) { 8 | super(river.riverName(), river.settings()); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /manual-testing/issues/175/test-issue-175.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use maindb 9 | var o = 10 | { 11 | "firstName": "John", 12 | "lastName": "Doe", 13 | "age": 34, 14 | "title_from_mongo": "Developer" 15 | } 16 | 17 | db.venue.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/22/test-issue-22.js: -------------------------------------------------------------------------------- 1 | var status = 
rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb 9 | var o = 10 | { 11 | "firstName": "John", 12 | "lastName": "Doe", 13 | "age": 34, 14 | "title_from_mongo": "Developer" 15 | } 16 | 17 | db.mycollec.save(o) -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/gridfs/RiverMongoGridFSTestAbstract.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.gridfs; 2 | 3 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 4 | 5 | public abstract class RiverMongoGridFSTestAbstract extends RiverMongoDBTestAbstract { 6 | 7 | protected RiverMongoGridFSTestAbstract() { 8 | super(true); 9 | } 10 | 11 | } 12 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition-177.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | {host: "127.0.0.1"}, 6 | {host: "localhost"} 7 | ], 8 | db: "mydatabase", 9 | "options": { 10 | "drop_collection": true, 11 | "import_all_collections": true 12 | } 13 | }, 14 | index: { 15 | name: "myindex" 16 | } 17 | } -------------------------------------------------------------------------------- /manual-testing/issues/95/query-in-chinese.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | "match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "publisher": "Herge" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/42/test-issue-42-import-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb42 9 | var o = 10 | { 11 | "firstName": "John42", 12 | "lastName": "Doe42", 13 | "age": 34, 14 | "state": "OPENED" 15 | } 16 | 17 | db.mycollec42.save(o) -------------------------------------------------------------------------------- /manual-testing/issues/64/_03-has-child-query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | "match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "publisher": "Herge" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/85/_03-has-child-query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | "match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "publisher": "Herge" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/95/_query-in-chinese.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | "match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "publisher": "Herge" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/54/simple-replicaset/create-replicaset.js: -------------------------------------------------------------------------------- 1 | var adminDb; 2 | function createReplicaset() { 3 | var config = { _id: "replica1", members:[{ _id : 0, host : "localhost:27017" }, { _id : 1, host : "localhost:27018" }, { _id : 2, host : "localhost:27019" }]}; 4 | var cmd = {"replSetInitiate": config}; 5 | adminDb.runCommand(cmd); 6 | //rs.status(); 7 | } 8 | adminDb = db.getSiblingDB("admin"); 9 | createReplicaset(); -------------------------------------------------------------------------------- /manual-testing/issues/64/simple-replicaset/create-replicaset.js: -------------------------------------------------------------------------------- 1 | var adminDb; 2 | function createReplicaset() { 3 | var config = { _id: "replica1", members:[{ _id : 0, host : "localhost:27017" }, { _id : 1, host : "localhost:27018" }, { _id : 2, host : "localhost:27019" }]}; 4 | var cmd = {"replSetInitiate": config}; 5 | adminDb.runCommand(cmd); 6 | //rs.status(); 7 | } 8 | adminDb = db.getSiblingDB("admin"); 9 | createReplicaset(); -------------------------------------------------------------------------------- /manual-testing/issues/85/simple-replicaset/create-replicaset.js: -------------------------------------------------------------------------------- 1 | var adminDb; 2 | function createReplicaset() { 3 | var config = { _id: "replica1", members:[{ _id : 0, host : "localhost:27017" }, { _id : 1, host : "localhost:27018" }, { _id : 2, host : "localhost:27019" }]}; 4 | var cmd = {"replSetInitiate": config}; 5 | adminDb.runCommand(cmd); 6 | //rs.status(); 7 | } 8 | adminDb = db.getSiblingDB("admin"); 9 | createReplicaset(); -------------------------------------------------------------------------------- /manual-testing/issues/181/_03-find-book-parent-query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "book", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | "match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "publisher": "herge" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/181/simple-replicaset/create-replicaset.js: -------------------------------------------------------------------------------- 1 | var adminDb; 2 | function createReplicaset() { 3 | var config = { _id: "replica1", members:[{ _id : 0, host : "localhost:27017" }, { _id : 1, host : "localhost:27018" }, { _id : 2, host : "localhost:27019" }]}; 4 | var cmd = {"replSetInitiate": config}; 5 | adminDb.runCommand(cmd); 6 | //rs.status(); 7 | } 8 | adminDb = db.getSiblingDB("admin"); 9 | createReplicaset(); -------------------------------------------------------------------------------- /manual-testing/issues/181/_03-find-chapter-parent-query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "has_child": { 4 | "type": "chapter", 5 | "query": { 6 | "filtered": { 7 | "query": { 8 | 
"match_all": {} 9 | }, 10 | "filter": { 11 | "term": { 12 | "title": "introduction" 13 | } 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/Status.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | public enum Status { 4 | 5 | UNKNOWN, 6 | /** River should be started */ 7 | START_PENDING, 8 | /** River is actually starting up */ 9 | STARTING, 10 | START_FAILED, 11 | RUNNING, 12 | STOPPED, 13 | IMPORT_FAILED, 14 | INITIAL_IMPORT_FAILED, 15 | SCRIPT_IMPORT_FAILED, 16 | RIVER_STALE; 17 | 18 | } 19 | -------------------------------------------------------------------------------- /manual-testing/issues/211/test-import-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | use mydb211 8 | var created = new Date() 9 | var item = {"user": "joe.doe", "scores": [54], "deleted": false, "created": created, "modified": created, "meta": ["tag1", "tag2"], "status": "A", "dummy": 1} 10 | db.media.save(item) 11 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition-store-statistics.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | {host: "127.0.0.1"}, 6 | {host: "localhost"} 7 | ], 8 | db: "mydatabase", 9 | "options": { 10 | "store_statistics": { 11 | "index": "archive-stats", 12 | "type": "dummy-stats" 13 | } 14 | } 15 | }, 16 | index: { 17 | name: "myindex" 18 | } 19 | } -------------------------------------------------------------------------------- /manual-testing/issues/64/_01_create-index-with-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings" : { 3 | "number_of_shards" : 1, 4 | "mapper" : { 5 | "dynamic": false 6 | } 7 | }, 8 | "mappings" : { 9 | "author": { 10 | "properties": { 11 | "name": { 12 | "type": "string" 13 | }, 14 | "nationality": { 15 | "type": "string" 16 | } 17 | } 18 | }, 19 | "book": { 20 | "_parent": { 21 | "type": "author" 22 | } 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /manual-testing/issues/85/_01_create-index-with-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings" : { 3 | "number_of_shards" : 1, 4 | "mapper" : { 5 | "dynamic": false 6 | } 7 | }, 8 | "mappings" : { 9 | "author": { 10 | "properties": { 11 | "name": { 12 | "type": "string" 13 | }, 14 | "nationality": { 15 | "type": "string" 16 | } 17 | } 18 | }, 19 | "book": { 20 | "_parent": { 21 | "type": "author" 22 | } 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /readme.txt: -------------------------------------------------------------------------------- 1 | curl -XPUT "http://localhost:9200/_river/mongodb/_meta" -d "{ \"type\": \"mongodb\", \"mongodb\":{ \"db\": \"DATABASE_NAME\", \"collection\": \"COLLECTION\", \"index\": \"ES_INDEX_NAME\" }}" 2 | 3 | Example: 4 | curl -XPUT "http://localhost:9200/_river/mongodb/_meta" -d "{ \"type\": \"mongodb\", 
\"mongodb\":{ \"db\": \"testsbp\", \"collection\": \"person\", \"index\": \"personindex\" }}" 5 | 6 | Query index: 7 | curl -XGET "http://localhost:9200/testsbp/_search?q=firstName:Richard" -------------------------------------------------------------------------------- /manual-testing/issues/211/mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "mongodb": { 3 | "db": "mydb211", 4 | "options": { 5 | "secondaryread_preference": true, 6 | "include_fields": ["deleted", "user", "created", "modified", "meta", "assets", "status"] 7 | }, 8 | "collection": "media" 9 | }, 10 | "type": "mongodb", 11 | "index": { 12 | "bulk": { 13 | "concurrent_requests": 1 14 | }, 15 | "type": "media", 16 | "name": "media" 17 | } 18 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "db": "%s", 17 | "collection": "%s", 18 | "gridfs": false 19 | }, 20 | "index": { 21 | "name": "%s", 22 | "throttle_size": 2000 23 | } 24 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-import-all-collections.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "db": "%s", 17 | "options": { 18 | "import_all_collections": true 19 | } 20 | }, 21 | "index": { 22 | "name": "%s" 23 | } 24 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-store-statistics.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | { 6 | "host": "localhost", 7 | "port": %s 8 | } 9 | ], 10 | "options": { 11 | "store_statistics": { 12 | "index": "%s", 13 | "type": "%s" 14 | } 15 | }, 16 | "db": "%s", 17 | "collection": "%s" 18 | }, 19 | index: { 20 | "name": "%s", 21 | "type": "%s" 22 | } 23 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-collection-filter.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "db": "%s", 17 | "collection": "%s", 18 | "filter": "%s" 19 | }, 20 | "index": { 21 | "name": "%s", 22 | "throttle_size": 2000 23 | } 24 | } -------------------------------------------------------------------------------- /manual-testing/issues/26/mongodb-river-simple-2.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 
| host: "localhost", 6 | port: "27017" 7 | }, 8 | { 9 | host: "localhost", 10 | port: "27018" 11 | }, 12 | { 13 | host: "localhost", 14 | port: "27019" 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "srs", 20 | "collection": "instances" 21 | }, 22 | "index": { 23 | "name": "srs", 24 | "type": "instances" 25 | } 26 | } -------------------------------------------------------------------------------- /manual-testing/issues/177/test-issue-177.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb177 9 | var o = 10 | { 11 | "firstName": "John", 12 | "lastName": "Doe", 13 | "age": 34, 14 | "title_from_mongo": "Developer" 15 | } 16 | 17 | db.mycollect1.save(o) 18 | 19 | var o2 = { 20 | "firstName": "Jim", 21 | "lastName": "Smith" 22 | } 23 | 24 | db.mycollect2.save(o2) -------------------------------------------------------------------------------- /manual-testing/issues/170/test-issue-170.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | var max = 1000000; 9 | use mydb170 10 | for (var i=0; i < max; i++) { 11 | var o = { 12 | "firstName": "John", 13 | "lastName": "Doe", 14 | "employeeId": i, 15 | "title": "Developer" 16 | } 17 | db.mycollection170.save(o); 18 | //print("save object " + o["_id"]); 19 | } 20 | -------------------------------------------------------------------------------- /manual-testing/issues/181/_01_create-index-with-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings" : { 3 | "number_of_shards" : 1, 4 | "mapper" : { 5 | "dynamic": false 6 | } 7 | }, 8 | "mappings" : { 9 | "author": { 10 | "properties": { 11 | "name": { 12 | "type": "string" 13 | }, 14 | "nationality": { 15 | "type": "string" 16 | } 17 | } 18 | }, 19 | "book": { 20 | "_parent": { 21 | "type": "author" 22 | } 23 | }, 24 | "chapter": { 25 | "_parent": { 26 | "type": "book" 27 | } 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-with-authentication.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }], 8 | "options": { 9 | "secondary_read_preference": true 10 | }, 11 | "credentials": [{ 12 | db: "admin", 13 | user: "admin", 14 | password: "Skipper2000" 15 | }], 16 | db: "testriver", 17 | collection: "person", 18 | gridfs: "false" 19 | }, 20 | index: { 21 | name: "personindex", 22 | throttle_size: 2000 23 | } 24 | } -------------------------------------------------------------------------------- /manual-testing/issues/64/$ES_HOME/config/templates/template_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "template_1" : { 3 | "template" : "authors*", 4 | "settings" : { 5 | "number_of_shards" : 1, 6 | "mapper" : { 7 | "dynamic": false 8 | } 9 | }, 10 | "mappings" : { 11 | "author": { 12 | "properties": { 13 | "name": { 14 | "type": "string" 15 | }, 16 | 
"nationality": { 17 | "type": "string" 18 | } 19 | } 20 | }, 21 | "book": { 22 | "_parent": { 23 | "type": "author" 24 | } 25 | } 26 | } 27 | } 28 | } -------------------------------------------------------------------------------- /manual-testing/issues/85/$ES_HOME/config/templates/template_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "template_1" : { 3 | "template" : "authors*", 4 | "settings" : { 5 | "number_of_shards" : 1, 6 | "mapper" : { 7 | "dynamic": false 8 | } 9 | }, 10 | "mappings" : { 11 | "author": { 12 | "properties": { 13 | "name": { 14 | "type": "string" 15 | }, 16 | "nationality": { 17 | "type": "string" 18 | } 19 | } 20 | }, 21 | "book": { 22 | "_parent": { 23 | "type": "author" 24 | } 25 | } 26 | } 27 | } 28 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/gridfs/test-gridfs-mongodb-river.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "%s", 20 | "collection": "%s", 21 | "gridfs": true 22 | }, 23 | "index": { 24 | "name": "%s", 25 | "throttle_size": 2000 26 | } 27 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-skip-initial-import.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "skip_initial_import": true 18 | }, 19 | "db": "%s", 20 | "collection": "%s", 21 | "gridfs": false 22 | }, 23 | "index": { 24 | "name": "%s", 25 | "throttle_size": 2000 26 | } 27 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuRuntimeConfigBuilder.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import de.flapdoodle.embed.mongo.Command; 4 | import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder; 5 | 6 | public class TokuRuntimeConfigBuilder extends RuntimeConfigBuilder { 7 | 8 | public TokuRuntimeConfigBuilder defaults(Command command) { 9 | super.defaults(command); 10 | artifactStore().overwriteDefault(new TokuArtifactStoreBuilder().defaults(command).build()); 11 | return this; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-document-with-dbref.json: -------------------------------------------------------------------------------- 1 | { 2 | "_id": { "$oid" : "51A220FDB573DF1144000000" }, 3 | "body": "this is a for a post", 4 | "category": { 5 | "$ref": "category", 6 | "$id": { "$oid" : "5194272CFDEA65E5D6000021" } 7 | }, 8 | "images": [ 9 | 10 | ], 11 | "location": [ 12 | -77.212365, 13 | 39.114207 14 | ], 15 | "innerDoc" : { 16 | "innerThing" : "testing" 17 | }, 18 | "postDate": { "$date": 1388853809000.000000 }, 19 | 
"postalCode": "20878", 20 | "title": "This is a title" 21 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-with-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "%s", 20 | "collection": "%s", 21 | "gridfs": false 22 | }, 23 | "index": { 24 | "name": "%s", 25 | "type": "%s", 26 | "throttle_size": 2000 27 | } 28 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition-307.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | {host: "127.0.0.1"}, 6 | {host: "localhost"} 7 | ], 8 | "options": { 9 | "connect_timeout": 11000, 10 | "secondary_read_preference": true, 11 | "is_mongos": true 12 | }, 13 | "credentials": [{ 14 | db: "admin", 15 | user: "admin", 16 | password: "secret" 17 | }], 18 | db: "mydatabase", 19 | collection: "mycollection", 20 | gridfs: "false" 21 | }, 22 | index: { 23 | name: "myindex", 24 | throttle_size: 2000 25 | } 26 | } -------------------------------------------------------------------------------- /manual-testing/issues/61/secured-mongodb-river-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }, 8 | { 9 | host: "localhost", 10 | port: "27018" 11 | }, 12 | { 13 | host: "localhost", 14 | port: "27019" 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "mydb", 20 | "collection": "mycollec", 21 | "credentials": 22 | [ 23 | { db: "admin", user: "admin", password: "admin" } 24 | ] 25 | }, 26 | "index": { 27 | "name": "mydb61", 28 | "type": "instances" 29 | } 30 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-drop-collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | "drop_collection": %s 19 | }, 20 | "db": "%s", 21 | "collection": "%s", 22 | "gridfs": false 23 | }, 24 | "index": { 25 | "name": "%s", 26 | "type": "%s", 27 | "throttle_size": 2000 28 | } 29 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-exclude-fields.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | 
"exclude_fields": %s 19 | }, 20 | "db": "%s", 21 | "collection": "%s", 22 | "gridfs": false 23 | }, 24 | "index": { 25 | "name": "%s", 26 | "type": "%s", 27 | "throttle_size": 2000 28 | } 29 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-include-fields.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | "include_fields": %s 19 | }, 20 | "db": "%s", 21 | "collection": "%s", 22 | "gridfs": false 23 | }, 24 | "index": { 25 | "name": "%s", 26 | "type": "%s", 27 | "throttle_size": 2000 28 | } 29 | } -------------------------------------------------------------------------------- /manual-testing/issues/209/test-import-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (var i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | use mydb209 8 | // db = db.getMongo().getDB( "mydb209" ); 9 | 10 | var count = db.mycollec209.count() 11 | print('count: ' + count) 12 | var max = count + 1000000 13 | for (var i=count+1; i <= max; i++) { 14 | var created = new Date() 15 | var item = {"user": "joe.doe-" + i, "scores": [count], "deleted": false, "created": created} 16 | // print(item); 17 | db.mycollec209.save(item) 18 | } -------------------------------------------------------------------------------- /manual-testing/issues/61/secured-mongodb-river-simple-local-user.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }, 8 | { 9 | host: "localhost", 10 | port: "27018" 11 | }, 12 | { 13 | host: "localhost", 14 | port: "27019" 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "mydb", 20 | "collection": "mycollec", 21 | "credentials": 22 | [ 23 | { db: "local", user: "local", password: "local" } 24 | ] 25 | }, 26 | "index": { 27 | "name": "mydb61", 28 | "type": "instances" 29 | } 30 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/script/test-mongodb-river-with-script.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "%s", 20 | "collection": "%s", 21 | "gridfs": false, 22 | "scriptType": "%s", 23 | "script": "%s" 24 | }, 25 | "index": { 26 | "name": "%s", 27 | "type": "%s", 28 | "throttle_size": 2000 29 | } 30 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-include-collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 
| "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | "include_collection": "%s" 19 | }, 20 | "db": "%s", 21 | "collection": "%s", 22 | "gridfs": false 23 | }, 24 | "index": { 25 | "name": "%s", 26 | "type": "%s", 27 | "throttle_size": 2000 28 | } 29 | } -------------------------------------------------------------------------------- /manual-testing/issues/181/$ES_HOME/config/templates/template_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "template_1" : { 3 | "template" : "authors*", 4 | "settings" : { 5 | "number_of_shards" : 1, 6 | "mapper" : { 7 | "dynamic": false 8 | } 9 | }, 10 | "mappings" : { 11 | "author": { 12 | "properties": { 13 | "name": { 14 | "type": "string" 15 | }, 16 | "nationality": { 17 | "type": "string" 18 | } 19 | } 20 | }, 21 | "book": { 22 | "_parent": { 23 | "type": "author" 24 | } 25 | }, 26 | "chapter": { 27 | "_parent": { 28 | "type": "book" 29 | } 30 | } 31 | } 32 | } 33 | } -------------------------------------------------------------------------------- /manual-testing/issues/95/index-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "fulltext": { 3 | "_all": { 4 | "indexAnalyzer": "ik", 5 | "searchAnalyzer": "ik", 6 | "term_vector": "no", 7 | "store": "false" 8 | }, 9 | "properties": { 10 | "content": { 11 | "type": "string", 12 | "store": "no", 13 | "term_vector": "with_positions_offsets", 14 | "indexAnalyzer": "ik", 15 | "searchAnalyzer": "ik", 16 | "include_in_all": "true", 17 | "boost": 8 18 | } 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /install-local.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Installs the current version from this directory to your local elasticsearch installation for development. 
4 | 5 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 6 | PWD="`pwd`" 7 | ES_HOME=${ES_HOME:-/usr/share/elasticsearch} 8 | 9 | mvn -Dmaven.test.skip=true package 10 | sudo $ES_HOME/bin/plugin --remove elasticsearch-river-mongodb 11 | 12 | VERSION=$(grep -E -m 1 -o "<version>(.*)</version>" pom.xml | sed -e 's,.*<version>\([^<]*\)</version>.*,\1,g' ) 13 | 14 | sudo $ES_HOME/bin/plugin --url "file://${DIR}/target/releases/elasticsearch-river-mongodb-${VERSION}.zip" --install elasticsearch-river-mongodb 15 | 16 | 17 | -------------------------------------------------------------------------------- /manual-testing/issues/61/secured-mongodb-river-simple-local-mydb-user.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }, 8 | { 9 | host: "localhost", 10 | port: "27018" 11 | }, 12 | { 13 | host: "localhost", 14 | port: "27019" 15 | }], 16 | "options": { 17 | "secondary_read_preference": true 18 | }, 19 | "db": "mydb", 20 | "collection": "mycollec", 21 | "credentials": 22 | [ 23 | { db: "local", user: "local", password: "local" }, 24 | { db: "mydb", user: "mydb", password: "mydb" } 25 | ] 26 | }, 27 | "index": { 28 | "name": "mydb61", 29 | "type": "instances" 30 | } 31 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-initial-timestamp.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | "initial_timestamp": { 19 | "script_type": "%s", 20 | "script": "%s" 21 | } 22 | }, 23 | "db": "%s", 24 | "collection": "%s", 25 | "gridfs": false 26 | }, 27 | "index": { 28 | "name": "%s", 29 | "type": "%s", 30 | "throttle_size": 2000 31 | } 32 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition-167.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | {host: "127.0.0.1"}, 6 | {host: "localhost"} 7 | ], 8 | "options": { 9 | "connect_timeout": 11000, 10 | "secondary_read_preference": true, 11 | "include_collection": "mycollection", 12 | "skip_initial_import": true, 13 | "store_statistics": true 14 | }, 15 | "credentials": [{ 16 | db: "admin", 17 | user: "admin", 18 | password: "secret" 19 | }], 20 | db: "mydatabase", 21 | collection: "mycollection", 22 | gridfs: "false" 23 | }, 24 | index: { 25 | name: "myindex", 26 | throttle_size: 2000 27 | } 28 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition-159.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [ 5 | {host: "127.0.0.1"}, 6 | {host: "localhost"} 7 | ], 8 | "options": { 9 | "connect_timeout": 11000, 10 | "secondary_read_preference": true, 11 | "include_collection": "mycollection", 12 | "advanced_transformation": true, 13 | "parent_types": ["parent1", "parent2"] 14 | }, 15 | "credentials": [{ 16 | db: 
"admin", 17 | user: "admin", 18 | password: "secret" 19 | }], 20 | db: "mydatabase", 21 | collection: "mycollection", 22 | gridfs: "false" 23 | }, 24 | index: { 25 | name: "myindex", 26 | throttle_size: 2000 27 | } 28 | } -------------------------------------------------------------------------------- /manual-testing/issues/89/$ES_HOME/config/scripts/testuuid.js: -------------------------------------------------------------------------------- 1 | // This script requires a change in elasticsearch-lang-javascript plugin tu spport CommonJS 2 | var helper = require('uuidhelpers') 3 | var base64 = '8Fq3Wd+BGUGD2CbsA4wasg=='; 4 | var subtype = 3; 5 | var good = '59b75af0-81df-4119-83d8-26ec038c1ab2'; 6 | logger.debug("Convert subtype/base64 {} - {}", subtype, base64); 7 | logger.debug("toUUID {}", helper.toUUID(base64)); 8 | logger.debug("toJUUID {}", helper.toJUUID(base64)); 9 | logger.debug("toCSUUID {}", helper.toCSUUID(base64)); 10 | logger.debug("toPYUUID {}", helper.toPYUUID(base64)); 11 | logger.debug("toHexUUID {}", helper.toHexUUID(subtype, base64)); 12 | logger.debug("Result should be {}", good); 13 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/advanced/test-mongodb-river-with-advanced-transformation.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | "host": "localhost", 6 | "port": %s 7 | }, 8 | { 9 | "host": "localhost", 10 | "port": %s 11 | }, 12 | { 13 | "host": "localhost", 14 | "port": %s 15 | }], 16 | "options": { 17 | "secondary_read_preference": true, 18 | "advanced_transformation": true, 19 | "parent_types": %s 20 | }, 21 | "db": "%s", 22 | "collection": "%s", 23 | "gridfs": false, 24 | "script_type": "%s", 25 | "script": "%s" 26 | }, 27 | "index": { 28 | "name": "%s", 29 | "type": "%s", 30 | "throttle_size": 2000 31 | } 32 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/tokumx/RiverTokuMXTestAbstract.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.tokumx; 2 | 3 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 4 | import org.testng.SkipException; 5 | import org.testng.annotations.BeforeClass; 6 | 7 | public abstract class RiverTokuMXTestAbstract extends RiverMongoDBTestAbstract { 8 | 9 | protected RiverTokuMXTestAbstract() { 10 | super(ExecutableType.TOKUMX); 11 | } 12 | 13 | @BeforeClass 14 | protected void checkEnvironment() { 15 | if (!tokuIsSupported()) { 16 | throw new SkipException("Skipping tests because running tests on environment not supported by TokuMX."); 17 | } 18 | } 19 | 20 | 21 | } 22 | -------------------------------------------------------------------------------- /src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=INFO, out, file 2 | 3 | log4j.appender.out=org.apache.log4j.ConsoleAppender 4 | log4j.appender.out.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n 6 | 7 | log4j.appender.file=org.apache.log4j.FileAppender 8 | log4j.appender.file.File=${project.build.directory}/test-elasticsearch-river-mongo.log 9 | log4j.appender.file.Append=false 10 | log4j.appender.file.layout=org.apache.log4j.PatternLayout 11 | 
log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 12 | 13 | log4j.logger.org.elasticsearch.river.mongodb=TRACE 14 | #, out, file 15 | log4j.logger.org.elasticsearch.rest.action=TRACE 16 | #, out, file 17 | 18 | -------------------------------------------------------------------------------- /manual-testing/issues/181/_03-import-document.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | use mydb181 9 | var author = 10 | { 11 | "_id": "herge", 12 | "name": "Herge", 13 | "nationality": "Belge" 14 | } 15 | 16 | db.authors.save(author) 17 | 18 | var book = { 19 | "_id": "tintin-au-congo", 20 | "_parentId": "herge", 21 | "name": "Tintin au Congo", 22 | "genre": "Bande dessinee", 23 | "publisher": "Herge" 24 | } 25 | 26 | db.books.save(book) 27 | 28 | var chapter = { 29 | "_parentId": "tintin-au-congo", 30 | "title": "Introduction", 31 | "page": 1 32 | } 33 | 34 | db.chapters.save(chapter) 35 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-definition.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }], 8 | "options": { 9 | "connect_timeout": 11000, 10 | "secondary_read_preference": true, 11 | "include_collection": "mycollection", 12 | "advanced_transformation": true, 13 | "parent_types": ["parent1", "parent2"] 14 | }, 15 | "credentials": [{ 16 | db: "admin", 17 | user: "admin", 18 | password: "secret" 19 | }], 20 | db: "mydatabase", 21 | collection: "mycollection", 22 | gridfs: "false" 23 | }, 24 | index: { 25 | name: "myindex", 26 | throttle_size: 2000, 27 | bulk_size:500, 28 | concurrent_bulk_requests: 40 29 | } 30 | } -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-mongodb-river-new-definition.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "mongodb", 3 | "mongodb": { 4 | "servers": [{ 5 | host: "localhost", 6 | port: "27017" 7 | }], 8 | "options": { 9 | "connect_timeout": 11000, 10 | "secondary_read_preference": true, 11 | "include_collection": "mycollection", 12 | "advanced_transformation": true, 13 | "parent_types": ["parent1", "parent2"] 14 | }, 15 | "credentials": [{ 16 | db: "admin", 17 | user: "admin", 18 | password: "secret" 19 | }], 20 | db: "mydatabase", 21 | collection: "mycollection", 22 | gridfs: "false" 23 | }, 24 | index: { 25 | name: "myindex", 26 | throttle_size: 2000, 27 | bulk: { 28 | actions: 500, 29 | size: "20mb", 30 | concurrent_requests: 40, 31 | flush_interval: "50ms" 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | test-output/ 3 | 4 | /build.gradle 5 | *.log 6 | maven-eclipse.xml 7 | 8 | # Intellij 9 | .idea 10 | *.iml 11 | 12 | # Eclipse (use 'mvn eclipse:eclipse' to build eclipse projects) 13 | # The only configuration files which are not ignored are certain files in 14 | # .settings (as listed below) since these files ensure common coding 15 | # style across Eclipse and IDEA. 
16 | # Other files (.project, .classpath) should be generated through Maven which 17 | # will correctly set the classpath based on the declared dependencies. 18 | .project 19 | .classpath 20 | .externalToolBuilders 21 | eclipse-build 22 | */.project 23 | */.classpath 24 | */eclipse-build 25 | /.settings/ 26 | !/.settings/org.eclipse.core.resources.prefs 27 | !/.settings/org.eclipse.jdt.core.prefs 28 | !/.settings/org.eclipse.jdt.ui.prefs 29 | !/.settings/org.eclipse.jdt.groovy.core.prefs 30 | bin 31 | -------------------------------------------------------------------------------- /src/test/resources/settings.yml: -------------------------------------------------------------------------------- 1 | # Note, that for development on a local machine, with small indices, it usually 2 | # makes sense to "disable" the distributed features: 3 | # 4 | node: 5 | local: true 6 | 7 | path: 8 | data: target/data 9 | plugins: target/plugins 10 | logs: target/log 11 | conf: target/config 12 | 13 | index: 14 | number_of_shards: 1 15 | number_of_replicas: 0 16 | store: 17 | type: memory 18 | gateway: 19 | type: none 20 | 21 | cluster: 22 | name: es-test-cluster 23 | 24 | script.disable_dynamic: false 25 | 26 | plugins: 27 | mapper-attachments: elasticsearch/elasticsearch-mapper-attachments/2.7.0 28 | lang-javascript: elasticsearch/elasticsearch-lang-javascript/2.7.0 29 | 30 | mongodb: 31 | version: 3.0.0 32 | use_dynamic_ports: false 33 | 34 | tokumx: 35 | version: 1.5.1 36 | use_dynamic_ports: false 37 | -------------------------------------------------------------------------------- /src/main/assemblies/plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | plugin 4 | 5 | zip 6 | 7 | false 8 | 9 | 10 | / 11 | true 12 | true 13 | 14 | org.elasticsearch:elasticsearch 15 | 16 | 17 | 18 | 19 | 20 | org.mongodb:mongo-java-driver 21 | 22 | 23 | 24 | 25 | 26 | _site 27 | src/site 28 | 29 | 30 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/SharedContext.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import java.util.concurrent.BlockingQueue; 4 | 5 | import org.elasticsearch.river.mongodb.MongoDBRiver.QueueEntry; 6 | 7 | /** 8 | * Holds mutable state to be shared between river, slurper, and indexer. 9 | */ 10 | public class SharedContext { 11 | 12 | private final BlockingQueue stream; 13 | private Status status; 14 | 15 | public SharedContext(BlockingQueue stream, Status status) { 16 | this.stream = stream; 17 | this.status = status; 18 | } 19 | 20 | public BlockingQueue getStream() { 21 | return stream; 22 | } 23 | 24 | public Status getStatus() { 25 | return status; 26 | } 27 | 28 | public void setStatus(Status status) { 29 | this.status = status; 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/gridfs/test-attachment.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 |

Hello world

6 |

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam 7 | vitae ligula ipsum. Aliquam aliquam mi id sem mattis a malesuada lorem 8 | ullamcorper. Maecenas sed ligula metus, in sodales velit. Pellentesque 9 | fringilla, justo quis congue aliquet, libero velit auctor tortor, nec 10 | porttitor enim arcu volutpat erat. Vestibulum id elit arcu, vel 11 | venenatis felis. Donec enim eros, commodo eget gravida a, feugiat 12 | scelerisque massa. Mauris felis tellus, placerat non imperdiet eu, 13 | tincidunt ut ipsum. Aenean laoreet semper faucibus. Ut eget sapien at 14 | neque venenatis egestas porta et elit. Ut diam augue, tempus sed 15 | malesuada vel, molestie sit amet arcu. Quisque tempus, nunc id tempus 16 | vulputate, ipsum justo venenatis ligula, eu fermentum massa lacus ut 17 | turpis. Maecenas non orci ipsum, vitae auctor erat.

18 | 19 | -------------------------------------------------------------------------------- /manual-testing/issues/190/02-map-reduce.js: -------------------------------------------------------------------------------- 1 | var status = rs.status() 2 | for (i=0; i < status.members.length; i++) { 3 | if (status.members[i].state == 1) { 4 | db = connect(status.members[i].name + "/local") 5 | } 6 | } 7 | 8 | function getRandomInt (min, max) { 9 | return Math.floor(Math.random() * (max - min + 1)) + min; 10 | } 11 | 12 | use mydb1900 13 | 14 | for (var i=0; i < 20; i++) { 15 | var orderA = 16 | { 17 | "name": "orderA-" + i, 18 | "cust_id": getRandomInt(0, 20), 19 | "amount": getRandomInt(1, 500), 20 | "status": "A" 21 | } 22 | db.orders.save(orderA) 23 | var orderB = 24 | { 25 | "name": "orderB-" + i, 26 | "cust_id": getRandomInt(0, 20), 27 | "amount": getRandomInt(1, 500), 28 | "status": "B" 29 | } 30 | db.orders.save(orderB) 31 | } 32 | 33 | db.orders.mapReduce( 34 | function() { 35 | emit(this.cust_id, this.amount); 36 | }, 37 | function (key, values) { 38 | return Array.sum( values ) 39 | }, 40 | { 41 | "query": {"status": "A"}, 42 | "out": "order_totals" 43 | } 44 | ) -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuMongodExecutable.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import java.io.IOException; 4 | 5 | import de.flapdoodle.embed.mongo.MongodExecutable; 6 | import de.flapdoodle.embed.mongo.MongodProcess; 7 | import de.flapdoodle.embed.mongo.config.IMongodConfig; 8 | import de.flapdoodle.embed.process.config.IRuntimeConfig; 9 | import de.flapdoodle.embed.process.distribution.Distribution; 10 | import de.flapdoodle.embed.process.extract.IExtractedFileSet; 11 | 12 | public class TokuMongodExecutable extends MongodExecutable { 13 | 14 | public TokuMongodExecutable(Distribution distribution, IMongodConfig mongodConfig, IRuntimeConfig runtimeConfig, IExtractedFileSet files) { 15 | super(distribution, mongodConfig, runtimeConfig, files); 16 | } 17 | 18 | @Override 19 | protected MongodProcess start(Distribution distribution, IMongodConfig config, IRuntimeConfig runtime) throws IOException { 20 | return new TokuMongodProcess(distribution, config, runtime, this); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/Operation.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | public enum Operation { 4 | INSERT(MongoDBRiver.OPLOG_INSERT_OPERATION), 5 | UPDATE(MongoDBRiver.OPLOG_UPDATE_OPERATION), 6 | DELETE(MongoDBRiver.OPLOG_DELETE_OPERATION), 7 | DROP_COLLECTION("dc"), 8 | DROP_DATABASE("dd"), 9 | COMMAND(MongoDBRiver.OPLOG_COMMAND_OPERATION), 10 | UNKNOWN(null); 11 | 12 | private String value; 13 | 14 | private Operation(String value) { 15 | this.value = value; 16 | } 17 | 18 | public String getValue() { 19 | return value; 20 | } 21 | 22 | public static Operation fromString(String value) { 23 | if (value != null) { 24 | for (Operation operation : Operation.values()) { 25 | if (value.equalsIgnoreCase(operation.getValue())) { 26 | return operation; 27 | } 28 | } 29 | if (MongoDBRiver.OPLOG_UPDATE_ROW_OPERATION.equalsIgnoreCase(value)) { 30 | return Operation.UPDATE; 31 | } 32 | } 33 | return Operation.UNKNOWN; 34 | } 35 | 
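// Usage sketch (added comment, not part of the original source). fromString() maps the raw
// oplog "op" code onto this enum; assuming the standard MongoDB oplog codes behind the
// OPLOG_* constants ("i" insert, "u" update, "d" delete, "c" command), a caller can do:
//   Operation op = Operation.fromString((String) oplogEntry.get("op")); // oplogEntry is hypothetical
//   if (op == Operation.UNKNOWN) { /* ignore or log the unsupported entry */ }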
} 36 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuArtifactStoreBuilder.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import de.flapdoodle.embed.mongo.Command; 4 | import de.flapdoodle.embed.process.config.store.ILibraryStore; 5 | import de.flapdoodle.embed.process.config.store.PlatformLibraryStoreBuilder; 6 | import de.flapdoodle.embed.process.distribution.Platform; 7 | import de.flapdoodle.embed.process.extract.UUIDTempNaming; 8 | import de.flapdoodle.embed.process.io.directories.PropertyOrPlatformTempDir; 9 | 10 | public class TokuArtifactStoreBuilder extends de.flapdoodle.embed.process.store.ArtifactStoreBuilder { 11 | 12 | public TokuArtifactStoreBuilder defaults(Command command) { 13 | tempDir().setDefault(new PropertyOrPlatformTempDir()); 14 | executableNaming().setDefault(new UUIDTempNaming()); 15 | download().setDefault(new TokuDownloadConfigBuilder().defaultsForCommand(command).build()); 16 | downloader().setDefault(new TokuDownloader()); 17 | libraries().setDefault(libraryStore()); 18 | return this; 19 | } 20 | 21 | private ILibraryStore libraryStore() { 22 | PlatformLibraryStoreBuilder libraryStoreBuilder = new PlatformLibraryStoreBuilder().defaults(); 23 | libraryStoreBuilder.setLibraries(Platform.Linux, new String[] { "libHotBackup.so" }); 24 | return libraryStoreBuilder.build(); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/MongoConfig.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import java.util.List; 4 | 5 | import com.mongodb.ServerAddress; 6 | 7 | public class MongoConfig { 8 | 9 | private boolean isMongos; 10 | private final List<Shard> shards; 11 | 12 | public MongoConfig(boolean isMongos, List<Shard> shards) { 13 | this.isMongos = isMongos; 14 | this.shards = shards; 15 | } 16 | 17 | public List<Shard> getShards() { 18 | return shards; 19 | } 20 | 21 | public boolean isMongos() { 22 | return isMongos; 23 | } 24 | 25 | public static class Shard { 26 | 27 | private final String name; 28 | private final List<ServerAddress> replicas; 29 | private final Timestamp latestOplogTimestamp; 30 | 31 | public Shard(String name, List<ServerAddress> replicas, Timestamp latestOplogTimestamp) { 32 | this.name = name; 33 | this.replicas = replicas; 34 | this.latestOplogTimestamp = latestOplogTimestamp; 35 | } 36 | 37 | public String getName() { 38 | return name; 39 | } 40 | public List<ServerAddress> getReplicas() { 41 | return replicas; 42 | } 43 | public Timestamp getLatestOplogTimestamp() { 44 | return latestOplogTimestamp; 45 | } 46 | } 47 | 48 | } -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/MongoDBRiverModule.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package org.elasticsearch.river.mongodb; 21 | 22 | import org.elasticsearch.common.inject.AbstractModule; 23 | import org.elasticsearch.river.River; 24 | 25 | /** 26 | * @author flaper87 (Flavio Percoco Premoli) 27 | * @author aparo (Alberto Paro) 28 | * @author kryptt (Rodolfo Hansen) 29 | */ 30 | public class MongoDBRiverModule extends AbstractModule { 31 | 32 | @Override 33 | protected void configure() { 34 | bind(River.class).to(MongoDBRiver.class).asEagerSingleton(); 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuMXStarter.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Adapted from de.flapdoodle.embed.mongo.MongodStarter. 3 | * Ultimately, it would be best to migrate this into Flapdoodle's mongo.config project 4 | */ 5 | package org.elasticsearch.river.mongodb.embed; 6 | 7 | import de.flapdoodle.embed.mongo.Command; 8 | import de.flapdoodle.embed.mongo.MongodExecutable; 9 | import de.flapdoodle.embed.mongo.MongodProcess; 10 | import de.flapdoodle.embed.mongo.config.IMongodConfig; 11 | import de.flapdoodle.embed.process.config.IRuntimeConfig; 12 | import de.flapdoodle.embed.process.distribution.Distribution; 13 | import de.flapdoodle.embed.process.extract.IExtractedFileSet; 14 | import de.flapdoodle.embed.process.runtime.Starter; 15 | 16 | public class TokuMXStarter extends Starter { 17 | 18 | private TokuMXStarter(IRuntimeConfig config) { 19 | super(config); 20 | } 21 | 22 | public static TokuMXStarter getInstance(IRuntimeConfig config) { 23 | return new TokuMXStarter(config); 24 | } 25 | 26 | public static TokuMXStarter getDefaultInstance() { 27 | return getInstance(new TokuRuntimeConfigBuilder().defaults(Command.MongoD).build()); 28 | } 29 | 30 | @Override 31 | protected MongodExecutable newExecutable(IMongodConfig mongodConfig, Distribution distribution, IRuntimeConfig runtime, 32 | IExtractedFileSet files) { 33 | return new TokuMongodExecutable(distribution, mongodConfig, runtime, files); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuDownloadConfigBuilder.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import org.apache.commons.lang3.Validate; 4 | 5 | import de.flapdoodle.embed.mongo.Command; 6 | import de.flapdoodle.embed.process.extract.UUIDTempNaming; 7 | import de.flapdoodle.embed.process.io.directories.UserHome; 8 | import de.flapdoodle.embed.process.io.progress.StandardConsoleProgressListener; 9 | 10 | public class TokuDownloadConfigBuilder extends de.flapdoodle.embed.process.config.store.DownloadConfigBuilder { 11 | 12 | private static final String DEFAULT_DOWNLOAD_PATH = "http://www.tokutek.com/tokumx-for-mongodb/tokumx-community-edition-download/?file="; 13 | 14 | public TokuDownloadConfigBuilder packageResolverForCommand(Command command) { 
15 | Validate.isTrue(command == Command.MongoD, "Only command de.flapdoodle.embed.mongo.Command.MongoD is currently supported"); 16 | packageResolver(new TokuPaths()); 17 | return this; 18 | } 19 | 20 | public TokuDownloadConfigBuilder defaultsForCommand(Command command) { 21 | return defaults().packageResolverForCommand(command); 22 | } 23 | 24 | public TokuDownloadConfigBuilder defaults() { 25 | fileNaming().setDefault(new UUIDTempNaming()); 26 | downloadPath(DEFAULT_DOWNLOAD_PATH); 27 | progressListener().setDefault(new StandardConsoleProgressListener()); 28 | artifactStorePath().setDefault(new UserHome(".embedmongo")); 29 | downloadPrefix().setDefault(new DownloadPrefix("embedtokumx-download")); 30 | userAgent().setDefault(new UserAgent( 31 | "Mozilla/5.0 (compatible; elasticsearch-river-mongodb tests; https://github.com/richardwilly98/elasticsearch-river-mongodb)")); 32 | return this; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuMongodProcess.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import java.io.IOException; 4 | import java.util.HashMap; 5 | import java.util.Map; 6 | import java.util.logging.Logger; 7 | 8 | import de.flapdoodle.embed.mongo.MongodExecutable; 9 | import de.flapdoodle.embed.mongo.MongodProcess; 10 | import de.flapdoodle.embed.mongo.config.IMongodConfig; 11 | import de.flapdoodle.embed.process.config.IRuntimeConfig; 12 | import de.flapdoodle.embed.process.config.store.FileType; 13 | import de.flapdoodle.embed.process.distribution.Distribution; 14 | import de.flapdoodle.embed.process.distribution.Platform; 15 | import de.flapdoodle.embed.process.extract.IExtractedFileSet; 16 | 17 | public class TokuMongodProcess extends MongodProcess { 18 | private static Logger logger = Logger.getLogger(TokuMongodProcess.class.getName()); 19 | 20 | public TokuMongodProcess(Distribution distribution, IMongodConfig config, IRuntimeConfig runtimeConfig, 21 | MongodExecutable mongodExecutable) throws IOException { 22 | super(distribution, config, runtimeConfig, mongodExecutable); 23 | } 24 | 25 | @Override 26 | protected Map getEnvironment(Distribution distribution, IMongodConfig config, IExtractedFileSet exe) { 27 | logger.info("ExtractedFileSet: " + exe); 28 | for (FileType type : FileType.values()) { 29 | logger.info("" + type + " files: " + exe.files(type)); 30 | } 31 | HashMap environment = new HashMap(); 32 | // set LD_LIBRARY_PATH 33 | if (distribution.getPlatform() == Platform.Linux) { 34 | environment.put("LD_LIBRARY_PATH", exe.executable().getParent()); 35 | } 36 | return environment; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/StatusChecker.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper; 4 | 5 | class StatusChecker extends MongoDBRiverComponent implements Runnable { 6 | 7 | private final MongoDBRiver mongoDBRiver; 8 | private final MongoDBRiverDefinition definition; 9 | private final SharedContext context; 10 | 11 | public StatusChecker(MongoDBRiver mongoDBRiver, MongoDBRiverDefinition definition, SharedContext context) { 12 | super(mongoDBRiver); 13 | this.mongoDBRiver = mongoDBRiver; 14 | this.definition = 
definition; 15 | this.context = context; 16 | } 17 | 18 | @Override 19 | public void run() { 20 | while (true) { 21 | try { 22 | Status status = MongoDBRiverHelper.getRiverStatus(this.mongoDBRiver.esClient, this.definition.getRiverName()); 23 | if (status != this.context.getStatus()) { 24 | if (status == Status.RUNNING && this.context.getStatus() != Status.STARTING) { 25 | logger.trace("About to start river: {}", this.definition.getRiverName()); 26 | mongoDBRiver.internalStartRiver(); 27 | } else if (status == Status.STOPPED) { 28 | logger.info("About to stop river: {}", this.definition.getRiverName()); 29 | mongoDBRiver.internalStopRiver(); 30 | } 31 | } 32 | Thread.sleep(1000L); 33 | } catch (InterruptedException e) { 34 | logger.debug("Status thread interrupted", e, (Object) null); 35 | Thread.currentThread().interrupt(); 36 | break; 37 | } 38 | 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /test-attachment.txt: -------------------------------------------------------------------------------- 1 | -- create new index 2 | $ curl -XPUT "localhost:9200/testindex" -d ' 3 | > { settings: { index: { number_of_shards: 1, number_of_replicas: 0} 4 | > } 5 | > }' 6 | 7 | 8 | -- create mapping for testattachment type 9 | $ curl -XPUT "localhost:9200/testindex/testattachment/_mapping" -d ' 10 | { testattachment: { 11 | properties: { 12 | file : { type: "attachment"} 13 | } 14 | } 15 | }' 16 | 17 | -- import binary content 18 | $ curl -XPUT "localhost:9200/testindex/testattachment/1" -d @attachment.txt 19 | 20 | -- get mapping 21 | $ curl -XGET "localhost:9200/testindex/testattachment/_mapping?pretty=true" 22 | % Total % Received % Xferd Average Speed Time Time Time Current 23 | Dload Upload Total Spent Left Speed 24 | 100 868 100 868 0 0 856 0 0:00:01 0:00:01 --:--:-- 847k{ 25 | "testattachment" : { 26 | "properties" : { 27 | "file" : { 28 | "path" : "full", 29 | "type" : "attachment", 30 | "fields" : { 31 | "author" : { 32 | "type" : "string" 33 | }, 34 | "title" : { 35 | "type" : "string" 36 | }, 37 | "keywords" : { 38 | "type" : "string" 39 | }, 40 | "file" : { 41 | "type" : "string" 42 | }, 43 | "date" : { 44 | "format" : "dateOptionalTime", 45 | "type" : "date" 46 | }, 47 | "content_type" : { 48 | "type" : "string" 49 | } 50 | } 51 | }, 52 | "metadata" : { 53 | "dynamic" : "true", 54 | "properties" : { 55 | "md5" : { 56 | "type" : "string" 57 | }, 58 | "length" : { 59 | "type" : "long" 60 | } 61 | } 62 | } 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoDropCollectionDisabledTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. 
See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import org.testng.annotations.AfterClass; 22 | import org.testng.annotations.BeforeClass; 23 | import org.testng.annotations.Factory; 24 | import org.testng.annotations.Test; 25 | 26 | @Test 27 | public class RiverMongoDropCollectionDisabledTest extends RiverMongoDropCollectionTest { 28 | 29 | @Factory(dataProvider = "allMongoExecutableTypes") 30 | public RiverMongoDropCollectionDisabledTest(ExecutableType type) { 31 | super(type); 32 | dropCollectionOption = false; 33 | } 34 | 35 | @Override 36 | @BeforeClass 37 | public void createDatabase() { 38 | super.createDatabase(); 39 | } 40 | 41 | @Override 42 | @AfterClass 43 | public void cleanUp() { 44 | super.cleanUp(); 45 | } 46 | 47 | @Override 48 | @Test 49 | public void testDropCollection() throws Throwable { 50 | super.testDropCollection(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/util/MongoDBRiverHelper.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.util; 2 | 3 | import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; 4 | 5 | import java.io.IOException; 6 | 7 | import org.elasticsearch.action.get.GetResponse; 8 | import org.elasticsearch.client.Client; 9 | import org.elasticsearch.common.logging.ESLogger; 10 | import org.elasticsearch.common.logging.Loggers; 11 | import org.elasticsearch.common.xcontent.XContentBuilder; 12 | import org.elasticsearch.common.xcontent.support.XContentMapValues; 13 | import org.elasticsearch.river.mongodb.MongoDBRiver; 14 | import org.elasticsearch.river.mongodb.Status; 15 | 16 | public abstract class MongoDBRiverHelper { 17 | 18 | private static final ESLogger logger = Loggers.getLogger(MongoDBRiverHelper.class); 19 | 20 | public static Status getRiverStatus(Client client, String riverName) { 21 | GetResponse statusResponse = client.prepareGet("_river", riverName, MongoDBRiver.STATUS_ID).get(); 22 | if (!statusResponse.isExists()) { 23 | return Status.UNKNOWN; 24 | } else { 25 | Object obj = XContentMapValues.extractValue(MongoDBRiver.TYPE + "." 
+ MongoDBRiver.STATUS_FIELD, 26 | statusResponse.getSourceAsMap()); 27 | return Status.valueOf(obj.toString()); 28 | } 29 | } 30 | 31 | public static void setRiverStatus(Client client, String riverName, Status status) { 32 | logger.info("setRiverStatus called with {} - {}", riverName, status); 33 | XContentBuilder xb; 34 | try { 35 | xb = jsonBuilder().startObject().startObject(MongoDBRiver.TYPE).field(MongoDBRiver.STATUS_FIELD, status).endObject() 36 | .endObject(); 37 | client.prepareIndex("_river", riverName, MongoDBRiver.STATUS_ID).setSource(xb).get(); 38 | } catch (IOException ioEx) { 39 | logger.error("setRiverStatus failed for river {}", ioEx, riverName); 40 | } 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/embed/TokuPaths.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.embed; 2 | 3 | import de.flapdoodle.embed.mongo.Command; 4 | import de.flapdoodle.embed.mongo.Paths; 5 | import de.flapdoodle.embed.process.config.store.FileSet; 6 | import de.flapdoodle.embed.process.config.store.FileType; 7 | import de.flapdoodle.embed.process.distribution.BitSize; 8 | import de.flapdoodle.embed.process.distribution.Distribution; 9 | 10 | public class TokuPaths extends Paths { 11 | public TokuPaths() { 12 | super(Command.MongoD); 13 | } 14 | 15 | @Override 16 | public FileSet getFileSet(Distribution distribution) { 17 | String executableFileName; 18 | switch (distribution.getPlatform()) { 19 | case Linux: 20 | case OS_X: 21 | case Solaris: 22 | executableFileName = Command.MongoD.commandName(); 23 | break; 24 | case Windows: 25 | executableFileName = Command.MongoD.commandName() + ".exe"; 26 | break; 27 | default: 28 | throw new IllegalArgumentException("Unknown Platform " + distribution.getPlatform()); 29 | } 30 | FileSet.Builder builder = FileSet.builder().addEntry(FileType.Executable, executableFileName); 31 | builder.addEntry(FileType.Library, "libHotBackup.so"); 32 | builder.addEntry(FileType.Library, "libtokufractaltree.so", ".*/lib64/libtokufractaltree\\.so$"); 33 | builder.addEntry(FileType.Library, "libtokuportability.so"); 34 | return builder.build(); 35 | } 36 | 37 | @Override 38 | public String getPath(Distribution distribution) { 39 | if (distribution.getBitsize() != BitSize.B64) { 40 | throw new IllegalArgumentException("Only 64-bit systems are currently supported"); 41 | } 42 | 43 | String splatform; 44 | switch (distribution.getPlatform()) { 45 | case Linux: 46 | splatform = "linux"; 47 | break; 48 | case Windows: 49 | splatform = "win32"; 50 | break; 51 | case OS_X: 52 | splatform = "osx"; 53 | break; 54 | case Solaris: 55 | splatform = "sunos5"; 56 | break; 57 | default: 58 | throw new IllegalArgumentException("Unknown Platform " + distribution.getPlatform()); 59 | } 60 | 61 | return "tokumx-" + getVersionPart(distribution.getVersion()) + '-' + splatform + "-x86_64-main.tar.gz"; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoDBGroovyTest.groovy: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.simple 2 | 3 | import com.gmongo.GMongo 4 | import com.mongodb.BasicDBObject 5 | import com.mongodb.DBCollection 6 | import com.mongodb.WriteConcern 7 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract 
8 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract.ExecutableType 9 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest 10 | import org.testng.annotations.* 11 | import org.testng.Assert 12 | import org.elasticsearch.search.SearchHit 13 | 14 | import org.elasticsearch.index.query.QueryBuilders 15 | 16 | class RiverMongoDBGroovyTest extends RiverMongoDBTestAbstract { 17 | 18 | static final int WAIT = 1000 19 | 20 | private def db 21 | private DBCollection dbCollection 22 | 23 | @Factory(dataProvider = "allMongoExecutableTypes") 24 | public RiverMongoDBGroovyTest(ExecutableType type) { 25 | super(type); 26 | } 27 | 28 | @BeforeClass 29 | public void createDatabase() { 30 | db = new GMongo(mongo).getDB(database) 31 | db.setWriteConcern(WriteConcern.REPLICAS_SAFE) 32 | dbCollection = db.createCollection(collection, [:]) 33 | Assert.assertNotNull(dbCollection) 34 | } 35 | 36 | @AfterClass 37 | public void cleanUp() { 38 | db.dropDatabase() 39 | } 40 | 41 | @Test 42 | public void "simple mongodb river test"() { 43 | try { 44 | // Create river 45 | createRiver( 46 | "/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river.json", river, 3, 47 | database, collection, index 48 | ) 49 | 50 | def document = [ 51 | name: 'test-groovy', 52 | score: 99 53 | ] 54 | 55 | // Insert test document in mongodb 56 | def dbObject = new BasicDBObject(document) 57 | def result = dbCollection.insert(dbObject) 58 | logger.info("WriteResult: $result") 59 | Thread.sleep(WAIT) 60 | 61 | // Assert index exists 62 | def request = new IndicesExistsRequest(index) 63 | assert node.client().admin().indices().exists(request).actionGet().isExists() == true 64 | 65 | // Search data by parent 66 | refreshIndex() 67 | def id = dbObject.get("_id").toString() 68 | def response = node.client().prepareSearch(index).setQuery(QueryBuilders.queryString(id).defaultField("_id")).execute().actionGet() 69 | logger.debug("SearchResponse $response") 70 | 71 | // Asserts data 72 | assert response.hits.totalHits == 1 73 | SearchHit[] hits = response.hits.hits 74 | assert "test-groovy" == hits[0].sourceAsMap().name 75 | 76 | } finally { 77 | super.deleteRiver() 78 | super.deleteIndex() 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/plugin/river/mongodb/MongoDBRiverPlugin.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package org.elasticsearch.plugin.river.mongodb; 21 | 22 | import java.util.Collection; 23 | 24 | import org.elasticsearch.common.component.LifecycleComponent; 25 | import org.elasticsearch.common.inject.Module; 26 | import org.elasticsearch.plugins.AbstractPlugin; 27 | import org.elasticsearch.rest.RestModule; 28 | import org.elasticsearch.rest.action.mongodb.RestMongoDBRiverAction; 29 | import org.elasticsearch.river.RiversModule; 30 | import org.elasticsearch.river.mongodb.MongoClientService; 31 | import org.elasticsearch.river.mongodb.NodeLevelModule; 32 | import org.elasticsearch.river.mongodb.MongoDBRiver; 33 | import org.elasticsearch.river.mongodb.MongoDBRiverModule; 34 | 35 | import com.google.common.collect.ImmutableList; 36 | 37 | /** 38 | * @author flaper87 (Flavio Percoco Premoli) 39 | * @author aparo (Alberto Paro) 40 | * @author kryptt (Rodolfo Hansen) 41 | */ 42 | 43 | public class MongoDBRiverPlugin extends AbstractPlugin { 44 | 45 | @Override 46 | public String name() { 47 | return MongoDBRiver.NAME; 48 | } 49 | 50 | @Override 51 | public String description() { 52 | return MongoDBRiver.DESCRIPTION; 53 | } 54 | 55 | @Override 56 | public Collection<Class<? extends LifecycleComponent>> services() { 57 | return ImmutableList.<Class<? extends LifecycleComponent>>builder().addAll(super.services()).add(MongoClientService.class).build(); 58 | } 59 | 60 | /** 61 | * Node-level modules 62 | */ 63 | @Override 64 | public Collection<Class<? extends Module>> modules() { 65 | return ImmutableList.<Class<? extends Module>>builder().addAll(super.modules()).add(NodeLevelModule.class).build(); 66 | } 67 | 68 | /** 69 | * Register the MongoDB river 70 | */ 71 | public void onModule(RiversModule module) { 72 | module.registerRiver(MongoDBRiver.TYPE, MongoDBRiverModule.class); 73 | } 74 | 75 | /** 76 | * Register the REST handler 77 | */ 78 | public void onModule(RestModule module) { 79 | module.addRestAction(RestMongoDBRiverAction.class); 80 | } 81 | 82 | } 83 | -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/rest/XContentThrowableRestResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elasticsearch under one or more contributor 3 | * license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright 5 | * ownership. Elasticsearch licenses this file to you under 6 | * the Apache License, Version 2.0 (the "License"); you may 7 | * not use this file except in compliance with the License. 8 | * You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License.
18 | */ 19 | 20 | package org.elasticsearch.river.mongodb.rest; 21 | 22 | import org.elasticsearch.ElasticsearchException; 23 | import org.elasticsearch.common.xcontent.XContentBuilder; 24 | import org.elasticsearch.rest.*; 25 | 26 | import java.io.IOException; 27 | 28 | import static org.elasticsearch.ExceptionsHelper.detailedMessage; 29 | import static org.elasticsearch.river.mongodb.rest.action.support.RestXContentBuilder.restContentBuilder; 30 | 31 | /** 32 | * 33 | */ 34 | public class XContentThrowableRestResponse extends BytesRestResponse { 35 | 36 | public XContentThrowableRestResponse(RestRequest request, Throwable t) throws IOException { 37 | this(request, ((t instanceof ElasticsearchException) ? ((ElasticsearchException) t).status() : RestStatus.INTERNAL_SERVER_ERROR), t); 38 | } 39 | 40 | public XContentThrowableRestResponse(RestRequest request, RestStatus status, Throwable t) throws IOException { 41 | super(status, convert(request, status, t)); 42 | } 43 | 44 | private static XContentBuilder convert(RestRequest request, RestStatus status, Throwable t) throws IOException { 45 | XContentBuilder builder = restContentBuilder(request).startObject() 46 | .field("error", detailedMessage(t)) 47 | .field("status", status.getStatus()); 48 | if (t != null && request.paramAsBoolean("error_trace", false)) { 49 | builder.startObject("error_trace"); 50 | boolean first = true; 51 | while (t != null) { 52 | if (!first) { 53 | builder.startObject("cause"); 54 | } 55 | buildThrowable(t, builder); 56 | if (!first) { 57 | builder.endObject(); 58 | } 59 | t = t.getCause(); 60 | first = false; 61 | } 62 | builder.endObject(); 63 | } 64 | builder.endObject(); 65 | return builder; 66 | } 67 | 68 | private static void buildThrowable(Throwable t, XContentBuilder builder) throws IOException { 69 | builder.field("message", t.getMessage()); 70 | for (StackTraceElement stElement : t.getStackTrace()) { 71 | builder.startObject("at") 72 | .field("class", stElement.getClassName()) 73 | .field("method", stElement.getMethodName()); 74 | if (stElement.getFileName() != null) { 75 | builder.field("file", stElement.getFileName()); 76 | } 77 | if (stElement.getLineNumber() >= 0) { 78 | builder.field("line", stElement.getLineNumber()); 79 | } 80 | builder.endObject(); 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/test-include-fields-document-204.json: -------------------------------------------------------------------------------- 1 | { 2 | "gross_weight" : 0, 3 | "fifth_wheel_height" : 0, 4 | "number_of_valves" : 0, 5 | "number_of_cylinders" : 0, 6 | "net_weight" : 0, 7 | "truck_mounted_crane" : { 8 | "checkbox" : false, 9 | "string" : "", 10 | "num" : 0 11 | }, 12 | "central_lubrication" : false, 13 | "right_hand_drive" : false, 14 | "mega" : false, 15 | "toolbox" : false, 16 | "adr" : false, 17 | "hydraulics" : false, 18 | "rto" : false, 19 | "fuel" : "", 20 | "euro" : 0, 21 | "engine_type" : "", 22 | "engine_volume" : 0, 23 | "turbo" : false, 24 | "intercooler" : false, 25 | "injector" : false, 26 | "fuel_tanks" : { 27 | "count" : 0, 28 | "tank1" : 0, 29 | "tank2" : 0, 30 | "tank3" : 0, 31 | "overall" : 0 32 | }, 33 | "gearbox_type" : "", 34 | "gears_number" : "", 35 | "berth" : 0, 36 | "coolers" : [ ], 37 | "security" : [ ], 38 | "exterior" : [ ], 39 | "mirrors_glass" : [ ], 40 | "headlight" : [ ], 41 | "type_of_suspension" : "", 42 | "axles_number" : 0, 43 | "suspension_options" : [ ], 44 | "wheel_formula" 
: "", 45 | "axes_make" : "", 46 | "wheelbase" : 0, 47 | "brake_options" : [ ], 48 | "rubber" : { 49 | "brakes" : "", 50 | "rest_rpc" : 0, 51 | "rest_mm" : 0, 52 | "size" : 0 53 | }, 54 | "tire_size_on_the_axes" : { 55 | "first_axis" : { 56 | "brakes" : "", 57 | "rest_rpc" : 0, 58 | "rest_mm" : 0, 59 | "size" : 0 60 | }, 61 | "second_axis" : { 62 | "brakes" : "", 63 | "rest_rpc" : 0, 64 | "rest_mm" : 0, 65 | "size" : 0 66 | }, 67 | "third_axis" : { 68 | "brakes" : "", 69 | "rest_rpc" : 0, 70 | "rest_mm" : 0, 71 | "size" : 0 72 | } 73 | }, 74 | "spare_wheel" : [ ], 75 | "year" : { 76 | "month" : "", 77 | "year" : 0 78 | }, 79 | "first_registration" : { 80 | "month" : "", 81 | "year" : 0 82 | }, 83 | "mileage" : 0, 84 | "color_metallic" : { 85 | "color" : "", 86 | "metallic" : false 87 | }, 88 | "fuel_consumption_l_100km" : 0, 89 | "engine_make" : "", 90 | "cabin_type" : "", 91 | "dimensions" : { 92 | "heigth" : 4, 93 | "width" : 3, 94 | "length" : 2 95 | }, 96 | "mark_model" : { 97 | "model" : "eqwwqe", 98 | "mark" : "avia_daewoo" 99 | }, 100 | "aut_heater" : "", 101 | "gearbox_make_model" : { 102 | "model" : "", 103 | "mark" : "" 104 | }, 105 | "system_info" : { 106 | "price" : 12, 107 | "userId" : 227520, 108 | "date" : 1389218400000, 109 | "photos" : [ ], 110 | "sub_goods_refs" : [ ], 111 | "main_goods_ref" : "", 112 | "premium" : false, 113 | "extend_date" : 1389391200000, 114 | "expire_date" : 1394575200000, 115 | "removed" : false 116 | }, 117 | "price" : { 118 | "price" : 12, 119 | "vat_percent" : 0, 120 | "currency" : "uah" 121 | }, 122 | "description" : { 123 | "lng" : "ru", 124 | "text" : "sadzxczxc" 125 | }, 126 | "place" : { 127 | "country" : "6", 128 | "area" : "127", 129 | "default" : false 130 | }, 131 | "_model" : "$category_5200b707916c1489c4487bbd" 132 | } -------------------------------------------------------------------------------- /src/main/java/org/elasticsearch/river/mongodb/rest/action/support/RestXContentBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elasticsearch under one or more contributor 3 | * license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright 5 | * ownership. Elasticsearch licenses this file to you under 6 | * the Apache License, Version 2.0 (the "License"); you may 7 | * not use this file except in compliance with the License. 8 | * You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package org.elasticsearch.river.mongodb.rest.action.support; 21 | 22 | import org.elasticsearch.common.Nullable; 23 | import org.elasticsearch.common.bytes.BytesReference; 24 | import org.elasticsearch.common.io.stream.BytesStreamOutput; 25 | import org.elasticsearch.common.xcontent.*; 26 | import org.elasticsearch.rest.RestRequest; 27 | 28 | import java.io.IOException; 29 | 30 | /** 31 | * 32 | */ 33 | public class RestXContentBuilder { 34 | 35 | public static XContentBuilder restContentBuilder(RestRequest request) throws IOException { 36 | // use the request body as the auto detect source (if it exists) 37 | return restContentBuilder(request, request.hasContent() ? request.content() : null); 38 | } 39 | 40 | public static XContentBuilder restContentBuilder(RestRequest request, @Nullable BytesReference autoDetectSource) throws IOException { 41 | XContentType contentType = XContentType.fromRestContentType(request.param("format", request.header("Content-Type"))); 42 | if (contentType == null) { 43 | // try and guess it from the auto detect source 44 | if (autoDetectSource != null) { 45 | contentType = XContentFactory.xContentType(autoDetectSource); 46 | } 47 | } 48 | if (contentType == null) { 49 | // default to JSON 50 | contentType = XContentType.JSON; 51 | } 52 | XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), new BytesStreamOutput()); 53 | if (request.paramAsBoolean("pretty", false)) { 54 | builder.prettyPrint().lfAtEnd(); 55 | } 56 | 57 | builder.humanReadable(request.paramAsBoolean("human", builder.humanReadable())); 58 | 59 | String casing = request.param("case"); 60 | if (casing != null && "camelCase".equals(casing)) { 61 | builder.fieldCaseConversion(XContentBuilder.FieldCaseConversion.CAMELCASE); 62 | } else { 63 | // we expect all REST interfaces to write results in underscore casing, so 64 | // no need for double casing 65 | builder.fieldCaseConversion(XContentBuilder.FieldCaseConversion.NONE); 66 | } 67 | return builder; 68 | } 69 | 70 | public static XContentBuilder emptyBuilder(RestRequest request) throws IOException { 71 | return restContentBuilder(request, request.hasContent() ? 
request.content() : null).startObject().endObject(); 72 | } 73 | 74 | /** 75 | * Directly writes the source to the output builder 76 | */ 77 | public static void directSource(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException { 78 | XContentHelper.writeDirect(source, rawBuilder, params); 79 | } 80 | 81 | public static void restDocumentSource(BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException { 82 | XContentHelper.writeRawField("_source", source, builder, params); 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/site/scripts/app.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var mongoDBRiverApp = angular.module('mongoDBRiverApp', ['ngResource', 'ui.bootstrap']); 4 | 5 | mongoDBRiverApp.constant('appSettings', { 6 | defaultRefresh: 5000 7 | }); 8 | 9 | mongoDBRiverApp.controller('MainCtrl', function ($log, $scope, $resource, $timeout, appSettings) { 10 | var riverResource = $resource('/_river/:type/:river/:action' , {type:'@type', river:'@river', page:'@page'}, 11 | { 12 | list: {method:'GET', params: {action: 'list'}}, 13 | start: {method:'POST', params: {action: 'start'}}, 14 | stop: {method:'POST', params: {action: 'stop'}}, 15 | delete: {method:'POST', params: {action: 'delete'}} 16 | } 17 | ); 18 | var timeoutId; 19 | 20 | $scope.uiSettings = {}; 21 | 22 | $scope.getUISettings = function(riverName) { 23 | if (typeof $scope.uiSettings[riverName] == 'undefined') { 24 | $scope.uiSettings[riverName] = {}; 25 | } 26 | 27 | return $scope.uiSettings[riverName]; 28 | } 29 | 30 | $scope.rivers = []; 31 | $scope.type = null; 32 | $scope.pages = 0; 33 | $scope.page = 0; 34 | $scope.refresh = { label: 'Auto-refresh disabled', enabled: false }; 35 | $scope.next = { label: 'Next Page', enabled: false } 36 | $scope.prev = { label: 'Previous Page', enabled: false } 37 | 38 | $scope.nextPage = function() { 39 | if($scope.next.enabled) { 40 | $scope.list(null, $scope.page+1); 41 | } 42 | } 43 | 44 | $scope.prevPage = function() { 45 | if($scope.prev.enabled) { 46 | $scope.list(null, $scope.page-1); 47 | } 48 | } 49 | 50 | function autoRefresh() { 51 | timeoutId = $timeout(function() { 52 | $scope.list(); 53 | autoRefresh(); 54 | }, appSettings.defaultRefresh); 55 | } 56 | 57 | $scope.updateTimer = function(enabled) { 58 | $log.log('updateTimer - ' + enabled) 59 | if (!enabled) { 60 | autoRefresh(); 61 | $scope.refresh.label = 'Auto-refresh enabled'; 62 | $scope.refresh.enabled = true; 63 | } else { 64 | $timeout.cancel(timeoutId); 65 | timeoutId = null; 66 | $scope.refresh.label = 'Auto-refresh disabled'; 67 | $scope.refresh.enabled = false; 68 | } 69 | } 70 | 71 | $scope.list = function(type, page){ 72 | $log.log('list river type: ' + type); 73 | $scope.type = type || 'mongodb'; 74 | var data = {'type': $scope.type}; 75 | if(page != undefined && page != null) { 76 | data.page = page; 77 | } 78 | var rivers = riverResource.list(data, function() { 79 | $log.log('rivers count: ' + rivers.hits); 80 | $scope.rivers = rivers.results; 81 | $scope.pages = rivers.pages; 82 | $scope.page = rivers.page; 83 | if($scope.page > 1) { 84 | $scope.prev.enabled = true; 85 | } else { 86 | $scope.prev.enabled = false; 87 | } 88 | if($scope.page < $scope.pages) { 89 | $scope.next.enabled = true; 90 | } else { 91 | $scope.next.enabled = false; 92 | } 93 | }); 94 | }; 95 | 96 | $scope.start = function(name){ 97 | 
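// Added note (not part of the original source): this handler calls the custom "start" action
// declared on riverResource above, i.e. it POSTs to /_river/<type>/<riverName>/start and then
// re-fetches the river list so the status column reflects the new state on the next render.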
$log.log('start: ' + name); 98 | riverResource.start({'type': $scope.type, 'river': name}, function(river, response) { 99 | $scope.list(); 100 | }); 101 | }; 102 | 103 | $scope.stop = function(name){ 104 | $log.log('stop: ' + name); 105 | riverResource.stop({'type': $scope.type, 'river': name}, function() { 106 | $scope.list(); 107 | }); 108 | }; 109 | 110 | $scope.delete = function(name){ 111 | $log.log('delete: ' + name); 112 | riverResource.delete({'type': $scope.type, 'river': name}, function() { 113 | $scope.list(); 114 | }); 115 | }; 116 | 117 | $scope.toString = function(object){ 118 | var value = JSON.stringify(angular.copy(object), undefined, 2); 119 | return value; 120 | }; 121 | 122 | $scope.list(); 123 | }); 124 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/tokumx/RiverTokuMXOplogRefTest.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.tokumx; 2 | 3 | import static org.elasticsearch.client.Requests.countRequest; 4 | import static org.hamcrest.MatcherAssert.assertThat; 5 | import static org.hamcrest.Matchers.equalTo; 6 | 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | import org.elasticsearch.action.ActionFuture; 11 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 12 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 13 | import org.elasticsearch.action.count.CountResponse; 14 | import org.hamcrest.Matchers; 15 | import org.testng.Assert; 16 | import org.testng.annotations.AfterClass; 17 | import org.testng.annotations.BeforeClass; 18 | import org.testng.annotations.Test; 19 | 20 | import com.mongodb.BasicDBObject; 21 | import com.mongodb.DB; 22 | import com.mongodb.DBCollection; 23 | import com.mongodb.DBCursor; 24 | import com.mongodb.DBObject; 25 | import com.mongodb.WriteConcern; 26 | 27 | @Test 28 | public class RiverTokuMXOplogRefTest extends RiverTokuMXTestAbstract { 29 | 30 | static final String LONG_STRING = 31 | "abcdefghijklmnopqrstuvwxyz abcdefghijklmnopqrstuvwxyz abcdefghijklmnopqrstuvwxyz abcdefghijklmnopqrstuvwxyz"; 32 | 33 | private DB mongoDB; 34 | private DBCollection mongoCollection; 35 | 36 | @BeforeClass 37 | public void createDatabase() { 38 | logger.debug("createDatabase {}", getDatabase()); 39 | try { 40 | mongoDB = getMongo().getDB(getDatabase()); 41 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 42 | super.createRiver(TEST_MONGODB_RIVER_SIMPLE_JSON); 43 | logger.info("Start createCollection"); 44 | this.mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 45 | Assert.assertNotNull(mongoCollection); 46 | } catch (Throwable t) { 47 | logger.error("createDatabase failed.", t); 48 | } 49 | } 50 | 51 | @AfterClass 52 | public void cleanUp() { 53 | super.deleteRiver(); 54 | logger.info("Drop database " + mongoDB.getName()); 55 | mongoDB.dropDatabase(); 56 | } 57 | 58 | @Test 59 | public void testOplogRefs() throws InterruptedException { 60 | mongoCollection.insert(buildLargeObject(), WriteConcern.REPLICAS_SAFE); 61 | Thread.sleep(wait); 62 | ActionFuture response = getNode().client().admin().indices() 63 | .exists(new IndicesExistsRequest(getIndex())); 64 | assertThat(response.actionGet().isExists(), equalTo(true)); 65 | refreshIndex(); 66 | CountResponse countResponse = getNode().client().count(countRequest(getIndex())).actionGet(); 67 | 
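// Added comment (not part of the original source): the single large insert above should be
// indexed exactly once, and because the document exceeds what TokuMX stores inline in an
// oplog entry, the most recent oplog record is expected to carry a "ref" field pointing at
// the spilled payload; the assertions below check both conditions.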
assertThat(countResponse.getCount(), Matchers.equalTo(1L)); 68 | try (DBCursor cursor = mongoDB.getSisterDB(LOCAL_DATABASE_NAME).getCollection(OPLOG_COLLECTION) 69 | .find().sort(new BasicDBObject("$natural", -1)).limit(1)) { 70 | DBObject lastOplog = cursor.toArray().get(0); 71 | assertThat(lastOplog.containsField("ref"), Matchers.is(Boolean.TRUE)); 72 | } 73 | } 74 | 75 | private static BasicDBObject buildLargeObject() { 76 | BasicDBObject core = new BasicDBObject(); 77 | for (char c = 'a'; c <= 'z'; ++c) { 78 | core.append("" + c, LONG_STRING); 79 | } 80 | List list1 = new ArrayList(10); 81 | for (int k = 1; k <= 10; ++k) { 82 | list1.add(new BasicDBObject("k", k).append("v", core)); 83 | } 84 | List list2 = new ArrayList(10); 85 | for (int j = 1; j <= 10; ++j) { 86 | list2.add(new BasicDBObject("j", j).append("v", list1)); 87 | } 88 | List list3 = new ArrayList(10); 89 | for (int i = 1; i <= 10; ++i) { 90 | list3.add(new BasicDBObject("i", i).append("v", list2)); 91 | } 92 | return new BasicDBObject("_id", 0).append("o", list3); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/site/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | MongoDB River Plugin for ElasticSearch 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
[AngularJS admin template for /src/site/index.html; the HTML markup was not preserved in this dump, only the template text. The page shows a "MongoDB River Administration" header with {{ refresh.label }}, {{ next.label }} and {{ prev.label }} controls, and for each river: {{ river.name }}, {{ river.status }} status badges, "Last Replicated - {{ river.lastTimestamp ? (river.lastTimestamp | date:'yyyy-MM-dd HH:mm:ss Z') : 'never' }}", "Documents Indexed - {{ river.indexCount }}", the river settings rendered via {{ toString(river.settings) }}, and Stop / Start / Delete links bound to the controller's stop(), start() and delete() functions shown above.]
65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoIndexExistsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.client.Requests.countRequest; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.action.ActionFuture; 26 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 28 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 29 | import org.elasticsearch.river.mongodb.Status; 30 | import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper; 31 | import org.testng.Assert; 32 | import org.testng.annotations.Factory; 33 | import org.testng.annotations.Test; 34 | 35 | import com.google.common.collect.ImmutableMap; 36 | import com.mongodb.BasicDBObject; 37 | import com.mongodb.DB; 38 | import com.mongodb.DBCollection; 39 | import com.mongodb.DBObject; 40 | import com.mongodb.WriteConcern; 41 | import com.mongodb.WriteResult; 42 | 43 | @Test 44 | public class RiverMongoIndexExistsTest extends RiverMongoDBTestAbstract { 45 | 46 | private DB mongoDB; 47 | private DBCollection mongoCollection; 48 | 49 | @Factory(dataProvider = "allMongoExecutableTypes") 50 | public RiverMongoIndexExistsTest(ExecutableType type) { 51 | super(type); 52 | } 53 | 54 | @Test 55 | public void dontDoInitialImportIfCollectionExists() throws Throwable { 56 | logger.debug("Start InitialImport"); 57 | try { 58 | createDatabase(); 59 | 60 | DBObject dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Richard")); 61 | WriteResult result1 = mongoCollection.insert(dbObject1); 62 | logger.info("WriteResult: {}", result1.toString()); 63 | Thread.sleep(wait); 64 | 65 | createRiver(); 66 | Thread.sleep(wait); 67 | 68 | ActionFuture response = getNode().client().admin().indices() 69 | .exists(new IndicesExistsRequest(getIndex())); 70 | assertThat(response.actionGet().isExists(), equalTo(true)); 71 | refreshIndex(); 72 | assertThat(getNode().client().count(countRequest(getIndex())).actionGet().getCount(), equalTo(1l)); 73 | 74 | deleteRiver(); 75 | createRiver(); 76 | 77 | Thread.sleep(wait); 78 | Assert.assertEquals(Status.INITIAL_IMPORT_FAILED, MongoDBRiverHelper.getRiverStatus(getNode().client(), getRiver())); 79 | } catch (Throwable t) { 80 | logger.error("InitialImport failed.", t); 81 | t.printStackTrace(); 
82 | throw t; 83 | } finally { 84 | cleanUp(); 85 | } 86 | } 87 | 88 | private void createDatabase() { 89 | logger.debug("createDatabase {}", getDatabase()); 90 | try { 91 | mongoDB = getMongo().getDB(getDatabase()); 92 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 93 | logger.info("Start createCollection"); 94 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 95 | Assert.assertNotNull(mongoCollection); 96 | } catch (Throwable t) { 97 | logger.error("createDatabase failed.", t); 98 | } 99 | } 100 | 101 | private void createRiver() { 102 | try { 103 | super.createRiver(TEST_MONGODB_RIVER_SIMPLE_JSON); 104 | } catch (Exception ex) { 105 | } 106 | } 107 | 108 | private void cleanUp() { 109 | super.deleteRiver(); 110 | logger.info("Drop database " + mongoDB.getName()); 111 | mongoDB.dropDatabase(); 112 | } 113 | 114 | } 115 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoIncludeCollectionTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 26 | import org.elasticsearch.action.count.CountResponse; 27 | import org.elasticsearch.index.query.QueryBuilders; 28 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 29 | import org.testng.Assert; 30 | import org.testng.annotations.AfterClass; 31 | import org.testng.annotations.BeforeClass; 32 | import org.testng.annotations.Factory; 33 | import org.testng.annotations.Test; 34 | 35 | import com.mongodb.BasicDBObject; 36 | import com.mongodb.DB; 37 | import com.mongodb.DBCollection; 38 | import com.mongodb.DBObject; 39 | import com.mongodb.WriteConcern; 40 | import com.mongodb.util.JSON; 41 | 42 | @Test 43 | public class RiverMongoIncludeCollectionTest extends RiverMongoDBTestAbstract { 44 | 45 | private static final String TEST_SIMPLE_MONGODB_RIVER_INCLUDE_COLLECTION_JSON = "/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-include-collection.json"; 46 | private DB mongoDB; 47 | private DBCollection mongoCollection; 48 | private String includeCollectionOption = "mycollection"; 49 | 50 | @Factory(dataProvider = "allMongoExecutableTypes") 51 | public RiverMongoIncludeCollectionTest(ExecutableType type) { 52 | super(type); 53 | } 54 | 55 | @BeforeClass 56 | public void createDatabase() { 57 | logger.debug("createDatabase {}", getDatabase()); 58 | try { 59 | mongoDB = getMongo().getDB(getDatabase()); 60 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 61 | super.createRiver(TEST_SIMPLE_MONGODB_RIVER_INCLUDE_COLLECTION_JSON, getRiver(), 3, (Object) includeCollectionOption, 62 | (Object) getDatabase(), (Object) getCollection(), (Object) getIndex(), (Object) getDatabase()); 63 | logger.info("Start createCollection"); 64 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 65 | Assert.assertNotNull(mongoCollection); 66 | } catch (Throwable t) { 67 | logger.error("createDatabase failed.", t); 68 | } 69 | } 70 | 71 | @AfterClass 72 | public void cleanUp() { 73 | super.deleteRiver(); 74 | logger.info("Drop database " + mongoDB.getName()); 75 | mongoDB.dropDatabase(); 76 | } 77 | 78 | @Test 79 | public void testIncludeCollection() throws Throwable { 80 | logger.debug("Start testIncludeCollection"); 81 | try { 82 | String mongoDocument = copyToStringFromClasspath(TEST_SIMPLE_MONGODB_DOCUMENT_JSON); 83 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 84 | mongoCollection.insert(dbObject); 85 | Thread.sleep(wait); 86 | 87 | assertThat(getNode().client().admin().indices().exists(new IndicesExistsRequest(getIndex())).actionGet().isExists(), 88 | equalTo(true)); 89 | assertThat(getNode().client().admin().indices().prepareTypesExists(getIndex()).setTypes(getDatabase()).execute().actionGet() 90 | .isExists(), equalTo(true)); 91 | 92 | String collectionName = mongoCollection.getName(); 93 | 94 | refreshIndex(); 95 | 96 | CountResponse countResponse = getNode().client() 97 | .prepareCount(getIndex()).setQuery(QueryBuilders.queryString(collectionName).defaultField(includeCollectionOption)).get(); 98 | assertThat(countResponse.getCount(), equalTo(1L)); 99 | } catch (Throwable t) { 100 | logger.error("testIncludeCollection failed.", t); 101 | t.printStackTrace(); 102 | throw t; 103 | } 104 
| } 105 | } 106 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoDbRefTest.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb.simple; 2 | 3 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 4 | import static org.hamcrest.MatcherAssert.assertThat; 5 | import static org.hamcrest.Matchers.equalTo; 6 | 7 | import org.elasticsearch.action.ActionFuture; 8 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 9 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 10 | import org.elasticsearch.action.search.SearchRequest; 11 | import org.elasticsearch.action.search.SearchResponse; 12 | import org.elasticsearch.index.query.QueryBuilders; 13 | import org.elasticsearch.index.query.QueryStringQueryBuilder; 14 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 15 | import org.testng.Assert; 16 | import org.testng.annotations.AfterClass; 17 | import org.testng.annotations.BeforeClass; 18 | import org.testng.annotations.Factory; 19 | import org.testng.annotations.Test; 20 | 21 | import com.mongodb.BasicDBObject; 22 | import com.mongodb.DB; 23 | import com.mongodb.DBCollection; 24 | import com.mongodb.DBObject; 25 | import com.mongodb.DBRef; 26 | import com.mongodb.WriteConcern; 27 | import com.mongodb.WriteResult; 28 | import com.mongodb.util.JSON; 29 | 30 | public class RiverMongoDbRefTest extends RiverMongoDBTestAbstract { 31 | 32 | private static final String TEST_DBREF_MONGODB_DOCUMENT_JSON = "/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-document-with-dbref.json"; 33 | 34 | private DB mongoDB; 35 | private DBCollection mongoCollection; 36 | 37 | @Factory(dataProvider = "allMongoExecutableTypes") 38 | public RiverMongoDbRefTest(ExecutableType type) { 39 | super(type); 40 | } 41 | 42 | @BeforeClass 43 | public void createDatabase() { 44 | logger.debug("createDatabase {}", getDatabase()); 45 | try { 46 | mongoDB = getMongo().getDB(getDatabase()); 47 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 48 | super.createRiver(TEST_MONGODB_RIVER_SIMPLE_JSON); 49 | logger.info("Start createCollection"); 50 | this.mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 51 | Assert.assertNotNull(mongoCollection); 52 | } catch (Throwable t) { 53 | logger.error("createDatabase failed.", t); 54 | } 55 | } 56 | 57 | @AfterClass 58 | public void cleanUp() { 59 | super.deleteRiver(); 60 | logger.info("Drop database " + mongoDB.getName()); 61 | mongoDB.dropDatabase(); 62 | } 63 | 64 | @Test 65 | public void simpleBSONObject() throws Throwable { 66 | logger.debug("Start simpleBSONObject"); 67 | try { 68 | String mongoDocument = copyToStringFromClasspath(TEST_DBREF_MONGODB_DOCUMENT_JSON); 69 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 70 | 71 | WriteResult result = mongoCollection.insert(dbObject); 72 | Thread.sleep(wait); 73 | String id = dbObject.get("_id").toString(); 74 | String categoryId = ((DBRef) dbObject.get("category")).getId().toString(); 75 | logger.info("WriteResult: {}", result.toString()); 76 | ActionFuture response = getNode().client().admin().indices() 77 | .exists(new IndicesExistsRequest(getIndex())); 78 | assertThat(response.actionGet().isExists(), equalTo(true)); 79 | refreshIndex(); 80 | SearchRequest search = getNode().client().prepareSearch(getIndex()) 81 
| .setQuery(QueryBuilders.queryString(categoryId).defaultField("category.id")).request(); 82 | SearchResponse searchResponse = getNode().client().search(search).actionGet(); 83 | assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); 84 | assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(id)); 85 | 86 | search = getNode().client().prepareSearch(getIndex()).setQuery(new QueryStringQueryBuilder("testing").defaultField("innerDoc.innerThing")) 87 | .request(); 88 | searchResponse = getNode().client().search(search).actionGet(); 89 | assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); 90 | 91 | // search = 92 | // getNode().client().prepareSearch(getIndex()).setQuery(QueryBuilders.geoShapeQuery("location", 93 | // new GeoCircle(new Point, 20.0))) 94 | // .request(); 95 | // searchResponse = getNode().client().search(search).actionGet(); 96 | // assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); 97 | 98 | } catch (Throwable t) { 99 | logger.error("simpleBSONObject failed.", t); 100 | t.printStackTrace(); 101 | throw t; 102 | } 103 | } 104 | 105 | } 106 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoCannotStopTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.action.ActionFuture; 26 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 28 | import org.elasticsearch.action.search.SearchRequest; 29 | import org.elasticsearch.action.search.SearchResponse; 30 | import org.elasticsearch.index.query.QueryStringQueryBuilder; 31 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 32 | import org.elasticsearch.river.mongodb.Status; 33 | import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper; 34 | import org.testng.Assert; 35 | import org.testng.annotations.AfterClass; 36 | import org.testng.annotations.BeforeClass; 37 | import org.testng.annotations.Factory; 38 | import org.testng.annotations.Test; 39 | 40 | import com.mongodb.BasicDBObject; 41 | import com.mongodb.DB; 42 | import com.mongodb.DBCollection; 43 | import com.mongodb.DBObject; 44 | import com.mongodb.WriteConcern; 45 | import com.mongodb.WriteResult; 46 | import com.mongodb.util.JSON; 47 | 48 | @Test 49 | public class RiverMongoCannotStopTest extends RiverMongoDBTestAbstract { 50 | 51 | private DB mongoDB; 52 | private DBCollection mongoCollection; 53 | 54 | @Factory(dataProvider = "allMongoExecutableTypes") 55 | public RiverMongoCannotStopTest(ExecutableType type) { 56 | super(type); 57 | } 58 | 59 | @BeforeClass 60 | public void createDatabase() { 61 | logger.debug("createDatabase {}", getDatabase()); 62 | try { 63 | mongoDB = getMongo().getDB(getDatabase()); 64 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 65 | logger.info("Start createCollection"); 66 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 67 | Assert.assertNotNull(mongoCollection); 68 | } catch (Throwable t) { 69 | logger.error("createDatabase failed.", t); 70 | } 71 | } 72 | 73 | @AfterClass 74 | public void cleanUp() { 75 | logger.info("Drop database " + mongoDB.getName()); 76 | mongoDB.dropDatabase(); 77 | } 78 | 79 | @Test 80 | public void simpleRiver() throws Throwable { 81 | logger.debug("Start simpleRiver"); 82 | try { 83 | super.createRiver(TEST_MONGODB_RIVER_SIMPLE_JSON); 84 | String mongoDocument = copyToStringFromClasspath(TEST_SIMPLE_MONGODB_DOCUMENT_JSON); 85 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 86 | WriteResult result = mongoCollection.insert(dbObject); 87 | Thread.sleep(wait); 88 | dbObject.get("_id").toString(); 89 | logger.info("WriteResult: {}", result.toString()); 90 | ActionFuture response = getNode().client().admin().indices() 91 | .exists(new IndicesExistsRequest(getIndex())); 92 | assertThat(response.actionGet().isExists(), equalTo(true)); 93 | refreshIndex(); 94 | SearchRequest search = getNode().client().prepareSearch(getIndex()).setQuery(new QueryStringQueryBuilder("Richard").defaultField("name")) 95 | .request(); 96 | SearchResponse searchResponse = getNode().client().search(search).actionGet(); 97 | assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); 98 | 99 | super.deleteRiver(); 100 | Thread.sleep(wait); 101 | 102 | Status status = MongoDBRiverHelper.getRiverStatus(getNode().client(), getRiver()); 103 | Assert.assertTrue(status == Status.UNKNOWN); 104 | } catch (Throwable t) { 105 | 
logger.error("simpleRiver failed.", t); 106 | t.printStackTrace(); 107 | throw t; 108 | } finally { 109 | } 110 | } 111 | 112 | } 113 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/ExcludeFieldsTest.java: -------------------------------------------------------------------------------- 1 | package org.elasticsearch.river.mongodb; 2 | 3 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 4 | 5 | import java.util.Arrays; 6 | import java.util.HashSet; 7 | import java.util.Set; 8 | 9 | import org.elasticsearch.common.logging.ESLogger; 10 | import org.elasticsearch.common.logging.Loggers; 11 | import org.elasticsearch.river.mongodb.util.MongoDBHelper; 12 | import org.testng.Assert; 13 | import org.testng.annotations.Test; 14 | 15 | import com.mongodb.DBObject; 16 | import com.mongodb.util.JSON; 17 | 18 | @Test 19 | public class ExcludeFieldsTest { 20 | 21 | private final ESLogger logger = Loggers.getLogger(getClass()); 22 | 23 | public void testExcludeFields() { 24 | try { 25 | Set excludeFields = new HashSet(Arrays.asList("lastName", "hobbies", "address.apartment")); 26 | // test-exclude-fields-document.json 27 | String mongoDocument = copyToStringFromClasspath("/org/elasticsearch/river/mongodb/test-exclude-fields-document.json"); 28 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 29 | logger.debug("Initial BSON object: {}", dbObject); 30 | DBObject filteredObject = MongoDBHelper.applyExcludeFields(dbObject, excludeFields); 31 | logger.debug("Filtered BSON object: {}", filteredObject); 32 | Assert.assertNotNull(filteredObject); 33 | Assert.assertFalse(filteredObject.containsField("hobbies")); 34 | Assert.assertTrue(filteredObject.containsField("address")); 35 | Assert.assertFalse(((DBObject) filteredObject.get("address")).containsField("apartment")); 36 | } catch (Throwable t) { 37 | logger.error("testExcludeFields failed", t); 38 | Assert.fail(); 39 | } 40 | } 41 | 42 | public void testIncludeFields() { 43 | try { 44 | Set includeFields = new HashSet(Arrays.asList("lastName", "hobbies", "address.street")); 45 | // test-exclude-fields-document.json 46 | String mongoDocument = copyToStringFromClasspath("/org/elasticsearch/river/mongodb/test-exclude-fields-document.json"); 47 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 48 | logger.debug("Initial BSON object: {}", dbObject); 49 | DBObject filteredObject = MongoDBHelper.applyIncludeFields(dbObject, includeFields); 50 | logger.debug("Filtered BSON object: {}", filteredObject); 51 | Assert.assertNotNull(filteredObject); 52 | Assert.assertFalse(filteredObject.containsField("firstName")); 53 | Assert.assertTrue(filteredObject.containsField("lastName")); 54 | Assert.assertTrue(filteredObject.containsField("hobbies")); 55 | Assert.assertTrue(filteredObject.containsField("address")); 56 | Assert.assertTrue(((DBObject) filteredObject.get("address")).containsField("street")); 57 | Assert.assertFalse(((DBObject) filteredObject.get("address")).containsField("apartment")); 58 | } catch (Throwable t) { 59 | logger.error("testIncludeFields failed", t); 60 | Assert.fail(); 61 | } 62 | } 63 | 64 | public void testIncludeFields204() { 65 | try { 66 | Set includeFields = new HashSet(Arrays.asList("system_info.userId", "place.area", "system_info.date", 67 | "place.country", "system_info.removed", "system_info.premium", "place.default", "system_info.price")); 68 | String mongoDocument = 
copyToStringFromClasspath("/org/elasticsearch/river/mongodb/test-include-fields-document-204.json"); 69 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 70 | logger.debug("Initial BSON object: {}", dbObject); 71 | DBObject filteredObject = MongoDBHelper.applyIncludeFields(dbObject, includeFields); 72 | logger.debug("Filtered BSON object: {}", filteredObject); 73 | Assert.assertNotNull(filteredObject); 74 | Assert.assertFalse(filteredObject.containsField("gross_weight")); 75 | Assert.assertTrue(((DBObject) filteredObject.get("system_info")).containsField("userId")); 76 | Assert.assertTrue(((DBObject) filteredObject.get("place")).containsField("area")); 77 | Assert.assertTrue(((DBObject) filteredObject.get("system_info")).containsField("date")); 78 | Assert.assertTrue(((DBObject) filteredObject.get("place")).containsField("country")); 79 | Assert.assertTrue(((DBObject) filteredObject.get("system_info")).containsField("removed")); 80 | Assert.assertTrue(((DBObject) filteredObject.get("system_info")).containsField("premium")); 81 | Assert.assertTrue(((DBObject) filteredObject.get("place")).containsField("default")); 82 | Assert.assertTrue(((DBObject) filteredObject.get("system_info")).containsField("price")); 83 | Assert.assertFalse(filteredObject.containsField("truck_mounted_crane")); 84 | Assert.assertFalse(((DBObject) filteredObject.get("system_info")).containsField("photos")); 85 | } catch (Throwable t) { 86 | logger.error("testIncludeFields204 failed", t); 87 | Assert.fail(); 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoImportAllCollectionsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.index.query.QueryBuilders; 26 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 27 | import org.testng.Assert; 28 | import org.testng.annotations.AfterClass; 29 | import org.testng.annotations.BeforeClass; 30 | import org.testng.annotations.Factory; 31 | import org.testng.annotations.Test; 32 | 33 | import com.mongodb.BasicDBObject; 34 | import com.mongodb.DB; 35 | import com.mongodb.DBCollection; 36 | import com.mongodb.DBObject; 37 | import com.mongodb.WriteConcern; 38 | import com.mongodb.WriteResult; 39 | import com.mongodb.util.JSON; 40 | 41 | @Test 42 | public class RiverMongoImportAllCollectionsTest extends RiverMongoDBTestAbstract { 43 | 44 | private DB mongoDB; 45 | private DBCollection mongoCollection; 46 | private DBCollection mongoCollection2; 47 | 48 | @Factory(dataProvider = "allMongoExecutableTypes") 49 | public RiverMongoImportAllCollectionsTest(ExecutableType type) { 50 | super(type); 51 | } 52 | 53 | @BeforeClass 54 | public void createDatabase() { 55 | logger.debug("createDatabase {}", getDatabase()); 56 | try { 57 | mongoDB = getMongo().getDB(getDatabase()); 58 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 59 | logger.info("Start createCollection"); 60 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 61 | Assert.assertNotNull(mongoCollection); 62 | mongoCollection2 = mongoDB.createCollection("collection-" + System.currentTimeMillis(), new BasicDBObject()); 63 | Assert.assertNotNull(mongoCollection2); 64 | createRiver(TEST_MONGODB_RIVER_IMPORT_ALL_COLLECTION_JSON, getRiver(), 3, getDatabase(), getIndex()); 65 | } catch (Throwable t) { 66 | logger.error("createDatabase failed.", t); 67 | } 68 | } 69 | 70 | @AfterClass 71 | public void cleanUp() { 72 | logger.info("Drop database " + mongoDB.getName()); 73 | super.deleteRiver(); 74 | mongoDB.dropDatabase(); 75 | } 76 | 77 | @Test 78 | public void importAllCollectionsTest() throws Throwable { 79 | logger.debug("Start importAllCollectionsTest"); 80 | try { 81 | String mongoDocument = copyToStringFromClasspath(TEST_SIMPLE_MONGODB_DOCUMENT_JSON); 82 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 83 | WriteResult result = mongoCollection.insert(dbObject); 84 | Thread.sleep(wait); 85 | String id = dbObject.get("_id").toString(); 86 | logger.info("WriteResult: {}", result.toString()); 87 | refreshIndex(); 88 | Assert.assertNotNull(getNode().client().prepareGet(getIndex(), mongoCollection.getName(), id).get().getId()); 89 | 90 | DBObject dbObject2 = (DBObject) JSON.parse(mongoDocument); 91 | WriteResult result2 = mongoCollection2.insert(dbObject2); 92 | Thread.sleep(wait); 93 | String id2 = dbObject2.get("_id").toString(); 94 | logger.info("WriteResult: {}", result2.toString()); 95 | refreshIndex(); 96 | Assert.assertNotNull(getNode().client().prepareGet(getIndex(), mongoCollection2.getName(), id2).get().getId()); 97 | 98 | mongoCollection.remove(dbObject); 99 | Thread.sleep(wait); 100 | refreshIndex(); 101 | assertThat(getNode().client().prepareCount(getIndex()).setTypes(mongoCollection.getName()).setQuery(QueryBuilders.queryString(id).defaultField("_id")) 102 | .get().getCount(), equalTo(0L)); 103 | 104 | mongoCollection2.remove(dbObject2); 105 | Thread.sleep(wait); 106 | 
refreshIndex(); 107 | assertThat(getNode().client().prepareCount(getIndex()).setTypes(mongoCollection2.getName()).setQuery(QueryBuilders.queryString(id2).defaultField("_id")) 108 | .get().getCount(), equalTo(0L)); 109 | } catch (Throwable t) { 110 | logger.error("importAllCollectionsTest failed.", t); 111 | t.printStackTrace(); 112 | throw t; 113 | } finally { 114 | } 115 | } 116 | 117 | } 118 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoStoreStatisticsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.client.Requests.countRequest; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.action.ActionFuture; 26 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 28 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 29 | import org.testng.Assert; 30 | import org.testng.annotations.AfterClass; 31 | import org.testng.annotations.BeforeClass; 32 | import org.testng.annotations.Factory; 33 | import org.testng.annotations.Test; 34 | 35 | import com.google.common.collect.ImmutableMap; 36 | import com.mongodb.BasicDBObject; 37 | import com.mongodb.DB; 38 | import com.mongodb.DBCollection; 39 | import com.mongodb.DBObject; 40 | import com.mongodb.WriteConcern; 41 | import com.mongodb.WriteResult; 42 | 43 | public class RiverMongoStoreStatisticsTest extends RiverMongoDBTestAbstract { 44 | 45 | private DB mongoDB; 46 | private DBCollection mongoCollection; 47 | private final String storeStatsIndex = "stats" + executableType.ordinal() + "-index-" + System.currentTimeMillis(); 48 | private final String storeStatsType = "stats" + executableType.ordinal() + '-' + System.currentTimeMillis(); 49 | 50 | @Factory(dataProvider = "allMongoExecutableTypes") 51 | public RiverMongoStoreStatisticsTest(ExecutableType type) { 52 | super(type); 53 | } 54 | 55 | @Test 56 | public void testStoreStatistics() throws Throwable { 57 | logger.debug("Start testStoreStatistics ({})", executableType); 58 | try { 59 | 60 | DBObject dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Richard")); 61 | WriteResult result1 = mongoCollection.insert(dbObject1); 62 | logger.info("WriteResult: {}", result1.toString()); 63 | Thread.sleep(wait); 64 | 65 | ActionFuture response = getNode().client().admin().indices() 66 | .exists(new 
IndicesExistsRequest(getIndex())); 67 | assertThat(response.actionGet().isExists(), equalTo(true)); 68 | refreshIndex(); 69 | assertThat(getNode().client().count(countRequest(getIndex())).actionGet().getCount(), equalTo(1l)); 70 | 71 | assertThat(getNode().client().admin().indices().prepareExists(storeStatsIndex).get().isExists(), equalTo(true)); 72 | 73 | assertThat(getNode().client().admin().indices().prepareTypesExists(storeStatsIndex).setTypes(storeStatsType).get().isExists(), 74 | equalTo(true)); 75 | 76 | deleteRiver(); 77 | createRiver(); 78 | 79 | Thread.sleep(wait); 80 | } catch (Throwable t) { 81 | logger.error("testStoreStatistics failed.", t); 82 | t.printStackTrace(); 83 | throw t; 84 | } 85 | } 86 | 87 | @BeforeClass 88 | void setUp() { 89 | createDatabase(); 90 | createRiver(); 91 | } 92 | 93 | private void createDatabase() { 94 | logger.debug("createDatabase {}", getDatabase()); 95 | try { 96 | mongoDB = getMongo().getDB(getDatabase()); 97 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 98 | logger.info("Start createCollection"); 99 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 100 | Assert.assertNotNull(mongoCollection); 101 | } catch (Throwable t) { 102 | logger.error("createDatabase failed.", t); 103 | } 104 | } 105 | 106 | private void createRiver() { 107 | try { 108 | super.createRiver(TEST_MONGODB_RIVER_STORE_STATISTICS_JSON, getRiver(), 1, 109 | (Object) storeStatsIndex, (Object) storeStatsType, (Object) getDatabase(), (Object) getCollection(), 110 | (Object) getIndex(), (Object) getDatabase()); 111 | 112 | } catch (Exception ex) { 113 | } 114 | } 115 | 116 | @AfterClass 117 | void cleanUp() { 118 | super.deleteRiver(); 119 | Assert.assertTrue(getNode().client().admin().indices().prepareDelete(storeStatsIndex).get().isAcknowledged()); 120 | logger.info("Drop database " + mongoDB.getName()); 121 | mongoDB.dropDatabase(); 122 | } 123 | 124 | } 125 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoCollectionFilterTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.elasticsearch.client.Requests.countRequest; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | import static org.hamcrest.Matchers.equalTo; 24 | 25 | import org.elasticsearch.action.ActionFuture; 26 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 28 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 29 | import org.testng.Assert; 30 | import org.testng.annotations.AfterMethod; 31 | import org.testng.annotations.BeforeMethod; 32 | import org.testng.annotations.Factory; 33 | import org.testng.annotations.Test; 34 | 35 | import com.google.common.collect.ImmutableMap; 36 | import com.mongodb.BasicDBObject; 37 | import com.mongodb.DB; 38 | import com.mongodb.DBCollection; 39 | import com.mongodb.DBObject; 40 | import com.mongodb.WriteConcern; 41 | import com.mongodb.WriteResult; 42 | 43 | @Test 44 | public class RiverMongoCollectionFilterTest extends RiverMongoDBTestAbstract { 45 | 46 | private static final String TEST_SIMPLE_MONGODB_RIVER_COLLECTION_FILTER_JSON = "/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-collection-filter.json"; 47 | private DB mongoDB; 48 | private DBCollection mongoCollection; 49 | private Object collectionFilterWithPrefix = "{'o.lang':'de'}"; 50 | private Object collectionFilterNoPrefix = "{'lang':'de'}"; 51 | 52 | @Factory(dataProvider = "allMongoExecutableTypes") 53 | public RiverMongoCollectionFilterTest(ExecutableType type) { 54 | super(type); 55 | } 56 | 57 | @BeforeMethod 58 | private void createDatabase() { 59 | logger.debug("createDatabase {}", getDatabase()); 60 | try { 61 | mongoDB = getMongo().getDB(getDatabase()); 62 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 63 | logger.info("Start createCollection"); 64 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 65 | Assert.assertNotNull(mongoCollection); 66 | } catch (Throwable t) { 67 | logger.error("createDatabase failed.", t); 68 | } 69 | } 70 | 71 | @AfterMethod 72 | private void cleanUp() { 73 | logger.trace("Delete river {}", getRiver()); 74 | try { 75 | deleteRiver(); 76 | logger.trace("Drop database " + mongoDB.getName()); 77 | mongoDB.dropDatabase(); 78 | } catch (Throwable t) { 79 | logger.warn("cleanUp failed.", t); 80 | } 81 | } 82 | 83 | @Test 84 | public void collectionFilterWithPrefixTest() throws Throwable { 85 | collectionFilterTest(collectionFilterWithPrefix); 86 | } 87 | 88 | @Test 89 | public void collectionFilterNoPrefixTest() throws Throwable { 90 | collectionFilterTest(collectionFilterNoPrefix); 91 | } 92 | 93 | private void collectionFilterTest(Object filter) throws Throwable { 94 | logger.debug("Start CollectionFilter"); 95 | try { 96 | DBObject dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Bernd", "lang", "de")); 97 | WriteResult result1 = mongoCollection.insert(dbObject1); 98 | logger.info("WriteResult: {}", result1.toString()); 99 | dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Richard", "lang", "fr")); 100 | result1 = mongoCollection.insert(dbObject1); 101 | logger.info("WriteResult: {}", result1.toString()); 102 | Thread.sleep(wait); 103 | 104 | createRiver(filter); 105 | Thread.sleep(wait); 106 | 107 | ActionFuture response = getNode().client().admin().indices() 108 | .exists(new IndicesExistsRequest(getIndex())); 109 | assertThat(response.actionGet().isExists(), 
equalTo(true)); 110 | refreshIndex(); 111 | assertThat(getNode().client().count(countRequest(getIndex())).actionGet().getCount(), equalTo(1l)); 112 | } catch (Throwable t) { 113 | logger.error("CollectionFilter failed.", t); 114 | t.printStackTrace(); 115 | throw t; 116 | } finally { 117 | cleanUp(); 118 | } 119 | } 120 | 121 | private void createRiver(Object filter) throws Exception { 122 | super.createRiver(TEST_SIMPLE_MONGODB_RIVER_COLLECTION_FILTER_JSON, getRiver(), 3, (Object) getDatabase(), 123 | (Object) getCollection(), filter, (Object) getIndex(), (Object) getDatabase()); 124 | } 125 | 126 | } 127 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/advanced/RiverMongoAdvancedTransformationGroovyScriptTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | package org.elasticsearch.river.mongodb.advanced; 20 | 21 | import static org.elasticsearch.client.Requests.countRequest; 22 | import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; 23 | import static org.hamcrest.MatcherAssert.assertThat; 24 | import static org.hamcrest.Matchers.equalTo; 25 | 26 | import org.elasticsearch.action.ActionFuture; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 28 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; 29 | import org.elasticsearch.action.count.CountResponse; 30 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 31 | import org.testng.Assert; 32 | import org.testng.annotations.AfterClass; 33 | import org.testng.annotations.BeforeClass; 34 | import org.testng.annotations.Factory; 35 | import org.testng.annotations.Test; 36 | 37 | import com.mongodb.BasicDBObject; 38 | import com.mongodb.DB; 39 | import com.mongodb.DBCollection; 40 | import com.mongodb.DBObject; 41 | import com.mongodb.WriteConcern; 42 | import com.mongodb.WriteResult; 43 | import com.mongodb.util.JSON; 44 | 45 | @Test 46 | public class RiverMongoAdvancedTransformationGroovyScriptTest extends RiverMongoDBTestAbstract { 47 | 48 | private static final String GROOVY_SCRIPT_TYPE = "groovy"; 49 | public static final String TEST_MONGODB_RIVER_WITH_ADVANCED_TRANSFORMATION_JSON = "/org/elasticsearch/river/mongodb/advanced/test-mongodb-river-with-advanced-transformation.json"; 50 | private DB mongoDB; 51 | private DBCollection mongoCollection; 52 | 53 | @Factory(dataProvider = "allMongoExecutableTypes") 54 | public RiverMongoAdvancedTransformationGroovyScriptTest(ExecutableType type) { 55 | super(type); 56 | } 57 | 58 | @BeforeClass 59 | public void 
createDatabase() { 60 | logger.debug("createDatabase {}", getDatabase()); 61 | try { 62 | mongoDB = getMongo().getDB(getDatabase()); 63 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 64 | 65 | logger.info("Start createCollection"); 66 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 67 | Assert.assertNotNull(mongoCollection); 68 | } catch (Throwable t) { 69 | logger.error("createDatabase failed.", t); 70 | } 71 | } 72 | 73 | @AfterClass 74 | public void cleanUp() { 75 | logger.info("Drop database " + mongoDB.getName()); 76 | mongoDB.dropDatabase(); 77 | } 78 | 79 | @Test 80 | public void testSimpleTransformationScript() throws Throwable { 81 | logger.debug("Start testSimpleTransformationScript"); 82 | String river = "testsimpletransformationscriptgroovyriver-" + System.currentTimeMillis(); 83 | String index = "testsimpletransformationscriptgroovyindex-" + System.currentTimeMillis(); 84 | try { 85 | logger.debug("Create river {}", river); 86 | String script = "ctx.documents << [data: [id: 12345, name: '99'], operation: 'i'] "; 87 | script += "<< [data: [id: 6666, name: 'document-ignored'], ignore: true] "; 88 | super.createRiver(TEST_MONGODB_RIVER_WITH_ADVANCED_TRANSFORMATION_JSON, river, 3, (Object) "[]", getDatabase(), getCollection(), 89 | GROOVY_SCRIPT_TYPE, script, index, getDatabase()); 90 | 91 | String mongoDocument = copyToStringFromClasspath(TEST_SIMPLE_MONGODB_DOCUMENT_JSON); 92 | DBObject dbObject = (DBObject) JSON.parse(mongoDocument); 93 | WriteResult result = mongoCollection.insert(dbObject); 94 | Thread.sleep(wait); 95 | logger.info("WriteResult: {}", result.toString()); 96 | refreshIndex(index); 97 | 98 | ActionFuture response = getNode().client().admin().indices().exists(new IndicesExistsRequest(index)); 99 | assertThat(response.actionGet().isExists(), equalTo(true)); 100 | CountResponse countResponse = getNode().client().count(countRequest(index)).actionGet(); 101 | logger.info("Document count: {}", countResponse.getCount()); 102 | assertThat(countResponse.getCount(), equalTo(2l)); 103 | 104 | mongoCollection.remove(dbObject); 105 | 106 | } catch (Throwable t) { 107 | logger.error("testSimpleTransformationScript failed.", t); 108 | t.printStackTrace(); 109 | throw t; 110 | } finally { 111 | super.deleteRiver(river); 112 | super.deleteIndex(index); 113 | logger.debug("End testSimpleTransformationScript"); 114 | } 115 | } 116 | 117 | } 118 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoExcludeFieldsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.hamcrest.MatcherAssert.assertThat; 22 | import static org.hamcrest.Matchers.equalTo; 23 | 24 | import java.util.Map; 25 | 26 | import org.bson.types.ObjectId; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 28 | import org.elasticsearch.action.search.SearchResponse; 29 | import org.elasticsearch.index.query.QueryBuilders; 30 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 31 | import org.testng.Assert; 32 | import org.testng.annotations.AfterClass; 33 | import org.testng.annotations.BeforeClass; 34 | import org.testng.annotations.Factory; 35 | import org.testng.annotations.Test; 36 | 37 | import com.mongodb.BasicDBObject; 38 | import com.mongodb.DB; 39 | import com.mongodb.DBCollection; 40 | import com.mongodb.DBObject; 41 | import com.mongodb.WriteConcern; 42 | 43 | @Test 44 | public class RiverMongoExcludeFieldsTest extends RiverMongoDBTestAbstract { 45 | 46 | private DB mongoDB; 47 | private DBCollection mongoCollection; 48 | protected boolean dropCollectionOption = true; 49 | 50 | @Factory(dataProvider = "allMongoExecutableTypes") 51 | public RiverMongoExcludeFieldsTest(ExecutableType type) { 52 | super(type); 53 | } 54 | 55 | @BeforeClass 56 | public void createDatabase() { 57 | logger.debug("createDatabase {}", getDatabase()); 58 | try { 59 | mongoDB = getMongo().getDB(getDatabase()); 60 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 61 | super.createRiver(TEST_MONGODB_RIVER_EXCLUDE_FIELDS_JSON, getRiver(), 3, 62 | (Object) "[\"exclude-field-1\", \"exclude-field-2\"]", (Object) getDatabase(), (Object) getCollection(), 63 | (Object) getIndex(), (Object) getDatabase()); 64 | logger.info("Start createCollection"); 65 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 66 | Assert.assertNotNull(mongoCollection); 67 | } catch (Throwable t) { 68 | logger.error("createDatabase failed.", t); 69 | } 70 | } 71 | 72 | @AfterClass 73 | public void cleanUp() { 74 | super.deleteRiver(); 75 | logger.info("Drop database " + mongoDB.getName()); 76 | mongoDB.dropDatabase(); 77 | } 78 | 79 | @Test 80 | public void testExcludeFields() throws Throwable { 81 | logger.debug("Start testExcludeFields"); 82 | try { 83 | DBObject dbObject = new BasicDBObject(); 84 | dbObject.put("exclude-field-1", System.currentTimeMillis()); 85 | dbObject.put("exclude-field-2", System.currentTimeMillis()); 86 | dbObject.put("include-field-1", System.currentTimeMillis()); 87 | mongoCollection.insert(dbObject); 88 | Thread.sleep(wait); 89 | String id = dbObject.get("_id").toString(); 90 | assertThat(getNode().client().admin().indices().exists(new IndicesExistsRequest(getIndex())).actionGet().isExists(), 91 | equalTo(true)); 92 | refreshIndex(); 93 | 94 | SearchResponse sr = getNode().client().prepareSearch(getIndex()).setQuery(QueryBuilders.queryString(id).defaultField("_id")) 95 | .get(); 96 | logger.debug("SearchResponse {}", sr.toString()); 97 | long totalHits = sr.getHits().getTotalHits(); 98 | logger.debug("TotalHits: {}", totalHits); 99 | assertThat(totalHits, equalTo(1l)); 100 | 101 | Map object = sr.getHits().getHits()[0].sourceAsMap(); 102 | assertThat(object.containsKey("exclude-field-1"), equalTo(false)); 103 | assertThat(object.containsKey("exclude-field-2"), equalTo(false)); 104 | assertThat(object.containsKey("include-field-1"), equalTo(true)); 105 | 106 | // Update Mongo object 107 | dbObject = mongoCollection.findOne(new 
BasicDBObject("_id", new ObjectId(id))); 108 | dbObject.put("include-field-2", System.currentTimeMillis()); 109 | mongoCollection.save(dbObject); 110 | Thread.sleep(wait); 111 | 112 | sr = getNode().client().prepareSearch(getIndex()).setQuery(QueryBuilders.queryString(id).defaultField("_id")).get(); 113 | logger.debug("SearchResponse {}", sr.toString()); 114 | totalHits = sr.getHits().getTotalHits(); 115 | logger.debug("TotalHits: {}", totalHits); 116 | assertThat(totalHits, equalTo(1l)); 117 | 118 | object = sr.getHits().getHits()[0].sourceAsMap(); 119 | assertThat(object.containsKey("exclude-field-1"), equalTo(false)); 120 | assertThat(object.containsKey("exclude-field-2"), equalTo(false)); 121 | assertThat(object.containsKey("include-field-1"), equalTo(true)); 122 | assertThat(object.containsKey("include-field-2"), equalTo(true)); 123 | } catch (Throwable t) { 124 | logger.error("testExcludeFields failed.", t); 125 | t.printStackTrace(); 126 | throw t; 127 | } 128 | } 129 | 130 | } 131 | -------------------------------------------------------------------------------- /src/test/java/org/elasticsearch/river/mongodb/simple/RiverMongoIncludeFieldsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to Elastic Search and Shay Banon under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. Elastic Search licenses this 6 | * file to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | package org.elasticsearch.river.mongodb.simple; 20 | 21 | import static org.hamcrest.MatcherAssert.assertThat; 22 | import static org.hamcrest.Matchers.equalTo; 23 | 24 | import java.util.Map; 25 | 26 | import org.bson.types.ObjectId; 27 | import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; 28 | import org.elasticsearch.action.search.SearchResponse; 29 | import org.elasticsearch.index.query.QueryBuilders; 30 | import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract; 31 | import org.testng.Assert; 32 | import org.testng.annotations.AfterClass; 33 | import org.testng.annotations.BeforeClass; 34 | import org.testng.annotations.Factory; 35 | import org.testng.annotations.Test; 36 | 37 | import com.mongodb.BasicDBObject; 38 | import com.mongodb.DB; 39 | import com.mongodb.DBCollection; 40 | import com.mongodb.DBObject; 41 | import com.mongodb.WriteConcern; 42 | 43 | @Test 44 | public class RiverMongoIncludeFieldsTest extends RiverMongoDBTestAbstract { 45 | 46 | private DB mongoDB; 47 | private DBCollection mongoCollection; 48 | protected boolean dropCollectionOption = true; 49 | 50 | @Factory(dataProvider = "allMongoExecutableTypes") 51 | public RiverMongoIncludeFieldsTest(ExecutableType type) { 52 | super(type); 53 | } 54 | 55 | @BeforeClass 56 | public void createDatabase() { 57 | logger.debug("createDatabase {}", getDatabase()); 58 | try { 59 | mongoDB = getMongo().getDB(getDatabase()); 60 | mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE); 61 | super.createRiver(TEST_MONGODB_RIVER_INCLUDE_FIELDS_JSON, getRiver(), 3, 62 | (Object) "[\"include-field-1\", \"include-field-2\"]", (Object) getDatabase(), (Object) getCollection(), 63 | (Object) getIndex(), (Object) getDatabase()); 64 | logger.info("Start createCollection"); 65 | mongoCollection = mongoDB.createCollection(getCollection(), new BasicDBObject()); 66 | Assert.assertNotNull(mongoCollection); 67 | } catch (Throwable t) { 68 | logger.error("createDatabase failed.", t); 69 | } 70 | } 71 | 72 | @AfterClass 73 | public void cleanUp() { 74 | super.deleteRiver(); 75 | logger.info("Drop database " + mongoDB.getName()); 76 | mongoDB.dropDatabase(); 77 | } 78 | 79 | @Test 80 | public void testIncludeFields() throws Throwable { 81 | logger.debug("Start testIncludeFields"); 82 | try { 83 | DBObject dbObject = new BasicDBObject(); 84 | dbObject.put("include-field-1", System.currentTimeMillis()); 85 | dbObject.put("include-field-2", System.currentTimeMillis()); 86 | dbObject.put("field-3", System.currentTimeMillis()); 87 | mongoCollection.insert(dbObject); 88 | Thread.sleep(wait); 89 | String id = dbObject.get("_id").toString(); 90 | assertThat(getNode().client().admin().indices().exists(new IndicesExistsRequest(getIndex())).actionGet().isExists(), 91 | equalTo(true)); 92 | refreshIndex(); 93 | 94 | SearchResponse sr = getNode().client().prepareSearch(getIndex()).setQuery(QueryBuilders.queryString(id).defaultField("_id")) 95 | .execute().actionGet(); 96 | logger.debug("SearchResponse {}", sr.toString()); 97 | long totalHits = sr.getHits().getTotalHits(); 98 | logger.debug("TotalHits: {}", totalHits); 99 | assertThat(totalHits, equalTo(1l)); 100 | 101 | Map object = sr.getHits().getHits()[0].sourceAsMap(); 102 | assertThat(object.containsKey("include-field-1"), equalTo(true)); 103 | assertThat(object.containsKey("include-field-2"), equalTo(true)); 104 | assertThat(object.containsKey("field-3"), equalTo(false)); 105 | 106 | // Update Mongo object 107 | dbObject = mongoCollection.findOne(new 
BasicDBObject("_id", new ObjectId(id))); 108 | dbObject.put("field-4", System.currentTimeMillis()); 109 | mongoCollection.save(dbObject); 110 | Thread.sleep(wait); 111 | 112 | sr = getNode().client().prepareSearch(getIndex()).setQuery(QueryBuilders.queryString(id).defaultField("_id")).execute() 113 | .actionGet(); 114 | logger.debug("SearchResponse {}", sr.toString()); 115 | totalHits = sr.getHits().getTotalHits(); 116 | logger.debug("TotalHits: {}", totalHits); 117 | assertThat(totalHits, equalTo(1l)); 118 | 119 | object = sr.getHits().getHits()[0].sourceAsMap(); 120 | assertThat(object.containsKey("include-field-1"), equalTo(true)); 121 | assertThat(object.containsKey("include-field-2"), equalTo(true)); 122 | assertThat(object.containsKey("field-3"), equalTo(false)); 123 | assertThat(object.containsKey("field-4"), equalTo(false)); 124 | } catch (Throwable t) { 125 | logger.error("testIncludeFields failed.", t); 126 | t.printStackTrace(); 127 | throw t; 128 | } 129 | } 130 | 131 | } 132 | --------------------------------------------------------------------------------