├── .github
│   └── workflows
│       ├── add-depr-ticket-to-depr-board.yml
│       ├── add-remove-label-on-comment.yml
│       ├── ci.yml
│       ├── codeql-analysis.yml
│       ├── commitlint.yml
│       └── self-assign-issue.yml
├── .gitignore
├── .ruby-gemset
├── .tx
│   └── config
├── AUTHORS
├── CHANGELOG.rst
├── Gemfile
├── Gemfile.lock
├── Gemfile3
├── Gemfile3.lock
├── Guardfile
├── LICENSE.txt
├── Procfile
├── README.rst
├── Rakefile
├── api
│   ├── comment_threads.rb
│   ├── commentables.rb
│   ├── comments.rb
│   ├── flags.rb
│   ├── notifications.rb
│   ├── notifications_and_subscriptions.rb
│   ├── pins.rb
│   ├── search.rb
│   ├── users.rb
│   └── votes.rb
├── app.rb
├── bin
│   ├── rake
│   ├── rspec
│   └── unicorn
├── catalog-info.yaml
├── config.ru
├── config
│   ├── application.yml
│   ├── benchmark.yml.sample
│   ├── mongoid.yml
│   ├── newrelic.yml
│   ├── unicorn.heroku.rb
│   ├── unicorn.rb
│   └── unicorn_tcp.rb
├── lib
│   ├── helpers.rb
│   ├── task_helpers.rb
│   ├── tasks
│   │   ├── benchmark.rake
│   │   ├── db.rake
│   │   ├── deep_search.rake
│   │   ├── flags.rake
│   │   ├── i18n.rake
│   │   ├── jobs.rake
│   │   ├── kpis.rake
│   │   ├── search.rake
│   │   └── user_stats.rake
│   ├── unicorn_helpers.rb
│   └── utils.rb
├── locale
│   ├── en-US.yml
│   └── x-test.yml
├── models
│   ├── activity.rb
│   ├── comment.rb
│   ├── comment_thread.rb
│   ├── commentable.rb
│   ├── concerns
│   │   └── searchable.rb
│   ├── constants.rb
│   ├── content.rb
│   ├── edit_history.rb
│   ├── notification.rb
│   ├── observers
│   │   ├── at_user_observer.rb
│   │   ├── post_reply_observer.rb
│   │   └── post_topic_observer.rb
│   ├── subscription.rb
│   └── user.rb
├── mongoutil.rb
├── presenters
│   ├── thread.rb
│   ├── thread_list.rb
│   └── thread_utils.rb
├── scripts
│   ├── db
│   │   ├── migrate-001-sk-author_username.js
│   │   ├── migrate-002-update-indexes.js
│   │   ├── migrate-003-update-indexes.js
│   │   ├── migrate-004-update-indexes.js
│   │   ├── migrate-005-update-indexes.js
│   │   ├── migrate-006-remove-email.js
│   │   ├── migrate-007-thread-type.js
│   │   ├── migrate-008-context.js
│   │   ├── migrate-009-comment_thread-author-created_at-indexes.js
│   │   ├── migrate-010-user-course_stats-course_id-index.js
│   │   ├── revert-migrate-001-sk-author_username.js
│   │   ├── revert-migrate-002-update-indexes.js
│   │   ├── revert-migrate-003-update-indexes.js
│   │   ├── revert-migrate-004-update-indexes.js
│   │   ├── revert-migrate-005-update-indexes.js
│   │   ├── revert-migrate-006-remove-email.js
│   │   ├── revert-migrate-007-thread-type.js
│   │   ├── revert-migrate-008-context.js
│   │   └── revert-migrate-010-user-course_stats-course_id-index.js
│   └── es
│       ├── copy-index.sh
│       ├── incremental-copy-index.sh
│       ├── migrate-index.sh
│       └── query-max-date.json
└── spec
    ├── api
    │   ├── abuse_spec.rb
    │   ├── comment_spec.rb
    │   ├── comment_thread_spec.rb
    │   ├── commentable_spec.rb
    │   ├── i18n_spec.rb
    │   ├── notifications_and_subscriptions_spec.rb
    │   ├── notifications_spec.rb
    │   ├── query_spec.rb
    │   ├── search_spec.rb
    │   ├── user_spec.rb
    │   └── vote_spec.rb
    ├── app_spec.rb
    ├── factories.rb
    ├── lib
    │   ├── task_helpers_spec.rb
    │   ├── tasks
    │   │   └── search_rake_spec.rb
    │   └── unicorn_helpers_spec.rb
    ├── models
    │   ├── at_user_observer_spec.rb
    │   ├── comment_spec.rb
    │   ├── comment_thread_spec.rb
    │   └── user_spec.rb
    ├── presenters
    │   ├── thread_list_spec.rb
    │   └── thread_spec.rb
    ├── spec_helper.rb
    ├── support
    │   ├── database_cleaner.rb
    │   ├── elasticsearch.rb
    │   ├── factory_bot.rb
    │   ├── matchers.rb
    │   └── rake.rb
    └── unicode_shared_examples.rb

/.github/workflows/add-depr-ticket-to-depr-board.yml: -------------------------------------------------------------------------------- 1 | # Run the workflow that adds new tickets that match any of: 2 | # - labelled "DEPR" 3 | # - title starts with "[DEPR]" 4 | # - body starts with "Proposal Date" (this is the first template field) 5 | # to the org-wide DEPR project board 6 |
7 | name: Add newly created DEPR issues to the DEPR project board 8 | 9 | on: 10 | issues: 11 | types: [opened] 12 | 13 | jobs: 14 | routeissue: 15 | uses: openedx/.github/.github/workflows/add-depr-ticket-to-depr-board.yml@master 16 | secrets: 17 | GITHUB_APP_ID: ${{ secrets.GRAPHQL_AUTH_APP_ID }} 18 | GITHUB_APP_PRIVATE_KEY: ${{ secrets.GRAPHQL_AUTH_APP_PEM }} 19 | SLACK_BOT_TOKEN: ${{ secrets.SLACK_ISSUE_BOT_TOKEN }} 20 | -------------------------------------------------------------------------------- /.github/workflows/add-remove-label-on-comment.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs when a comment is made on the ticket 2 | # If the comment starts with "label: " it tries to apply 3 | # the label indicated in the rest of the comment. 4 | # If the comment starts with "remove label: ", it tries 5 | # to remove the indicated label. 6 | # Note: Labels are allowed to have spaces and this script does 7 | # not parse spaces (as often a space is legitimate), so the command 8 | # "label: really long lots of words label" will apply the 9 | # label "really long lots of words label" 10 | 11 | name: Allows for the adding and removing of labels via comment 12 | 13 | on: 14 | issue_comment: 15 | types: [created] 16 | 17 | jobs: 18 | add_remove_labels: 19 | uses: openedx/.github/.github/workflows/add-remove-label-on-comment.yml@master 20 | 21 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: RUBY CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - "**" 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | ruby-version: ['3.1.4', '3.3.0'] 17 | mongodb-version: ['7'] 18 | include: 19 | - ruby-version: '3.1.4' 20 | gemfile: Gemfile 21 | allow-failure: false 22 | - ruby-version: '3.3.0' 23 | gemfile: Gemfile 24 | allow-failure: false 25 | 26 | env: 27 | SEARCH_SERVER_ES7: http://localhost:9200 28 | MONGOHQ_URL: mongodb://localhost:27017/cs_comments_service_test 29 | MONGOID_AUTH_MECH: "" 30 | NEW_RELIC_ENABLE: false 31 | RACK_ENV: staging 32 | SINATRA_ENV: staging 33 | API_KEY: password 34 | BUNDLE_GEMFILE: ${{ github.workspace }}/${{ matrix.gemfile }} 35 | RACK_TIMEOUT_SERVICE_TIMEOUT: 20 36 | services: 37 | elasticsearch: 38 | image: elasticsearch:7.8.0 39 | env: 40 | discovery.type: single-node 41 | bootstrap.memory_lock: true 42 | ES_JAVA_OPTS: -Xms512m -Xmx512m 43 | network.host: 0.0.0.0 44 | cluster.routing.allocation.disk.watermark.low: 150mb 45 | cluster.routing.allocation.disk.watermark.high: 100mb 46 | cluster.routing.allocation.disk.watermark.flood_stage: 50mb 47 | ports: 48 | - 9200:9200 49 | mongodb: 50 | image: mongo:${{ matrix.mongodb-version }} 51 | ports: 52 | - 27017:27017 53 | steps: 54 | - name: Checkout 55 | uses: actions/checkout@v3 56 | 57 | - name: Set up Ruby ${{ matrix.ruby-version }} 58 | uses: ruby/setup-ruby@v1 59 | with: 60 | ruby-version: ${{ matrix.ruby-version }} 61 | bundler-cache: true 62 | 63 | - name: Install dependencies 64 | run: bundle install 65 | 66 | - name: Run tests 67 | run: bin/rspec -fd 68 | continue-on-error: ${{ matrix.allow-failure }} 69 | - name: Send test coverage report to codecov.io 70 | uses: codecov/codecov-action@v4 71 | with: 72 | flags: unittests 73 | fail_ci_if_error: true 74 | token: ${{ secrets.CODECOV_TOKEN }} 75 |
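# Editor's note, not part of the workflow: a sketch of how the same suite can be
# reproduced locally, assuming Elasticsearch 7.8 and MongoDB are already
# listening on the ports published above:
#
#   export MONGOHQ_URL=mongodb://localhost:27017/cs_comments_service_test
#   export SEARCH_SERVER_ES7=http://localhost:9200
#   export API_KEY=password
#   bundle install && bin/rspec -fd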
-------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | schedule: 9 | - cron: '37 18 * * 3' 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ 'javascript', 'ruby' ] 24 | 25 | steps: 26 | - name: Checkout repository 27 | uses: actions/checkout@v2 28 | 29 | # Initializes the CodeQL tools for scanning. 30 | - name: Initialize CodeQL 31 | uses: github/codeql-action/init@v1 32 | with: 33 | languages: ${{ matrix.language }} 34 | 35 | - name: Perform CodeQL Analysis 36 | uses: github/codeql-action/analyze@v1 -------------------------------------------------------------------------------- /.github/workflows/commitlint.yml: -------------------------------------------------------------------------------- 1 | # Run commitlint on the commit messages in a pull request. 2 | 3 | name: Lint Commit Messages 4 | 5 | on: 6 | - pull_request 7 | 8 | jobs: 9 | commitlint: 10 | uses: openedx/.github/.github/workflows/commitlint.yml@master -------------------------------------------------------------------------------- /.github/workflows/self-assign-issue.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs when a comment is made on the ticket 2 | # If the comment starts with "assign me" it assigns the author to the 3 | # ticket (case insensitive) 4 | 5 | name: Assign comment author to ticket if they say "assign me" 6 | on: 7 | issue_comment: 8 | types: [created] 9 | 10 | jobs: 11 | self_assign_by_comment: 12 | uses: openedx/.github/.github/workflows/self-assign-issue.yml@master -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | *.rbc 3 | .bundle 4 | .config 5 | coverage 6 | InstalledFiles 7 | lib/bundler/man 8 | pkg 9 | rdoc 10 | spec/reports 11 | test/tmp 12 | test/version_tmp 13 | tmp 14 | 15 | # YARD artifacts 16 | .yardoc 17 | _yardoc 18 | doc/ 19 | 20 | *.sqlite3 21 | .*.swp 22 | .*.swo 23 | .*.swm 24 | *.pid 25 | 26 | config/benchmark.yml 27 | benchmark_log 28 | 29 | log/ 30 | #redcar 31 | .redcar/ 32 | /nbproject 33 | .idea/ 34 | 35 | .ruby-version 36 | 37 | # Store status of tests to allow rerunning failures 38 | .rspec-test-status -------------------------------------------------------------------------------- /.ruby-gemset: -------------------------------------------------------------------------------- 1 | cs_comments_service 2 | -------------------------------------------------------------------------------- /.tx/config: -------------------------------------------------------------------------------- 1 | [main] 2 | host = https://www.transifex.com 3 | 4 | [o:open-edx:p:edx-platform:r:comments-service] 5 | file_filter = locale/<lang>.yml 6 | source_file = locale/en-US.yml 7 | source_lang = en-US 8 | lang_map = en_CA: en-CA, is_IS: is-IS, ml_IN: ml-IN, ar_BH: ar-BH, ar_OM: ar-OM, ar_TN: ar-TN, fr_CH: fr-CH, vi_VN: vi-VN, zh_HK: zh-HK, am_ET: am-ET, en_BZ: en-BZ, es_GT: es-GT, es_DO: es-DO, et_EE: et-EE, lt_LT: lt-LT, pt_PT: pt-PT, qut_GT: qut-GT, de_DE: de-DE, en_AU: 
en-AU, en_PH: en-PH, sa_IN: sa-IN, tr_TR: tr-TR, se_FI: se-FI, sms_FI: sms-FI, sv_SE: sv-SE, es_PA: es-PA, fi_FI: fi-FI, gl_ES: gl-ES, ko_KR: ko-KR, lv_LV: lv-LV, mi_NZ: mi-NZ, pt_BR: pt-BR, quz_PE: quz-PE, en_TT: en-TT, gd_GB: gd-GB, hy_AM: hy-AM, zh_TW: zh-TW, hu_HU: hu-HU, it_IT: it-IT, mn_CN: mn-CN, ru_RU: ru-RU, sw_KE: sw-KE, be_BY: be-BY, en_ZA: en-ZA, gsw_FR: gsw-FR, tzm_DZ: tzm-DZ, tt_RU: tt-RU, es_EC: es-EC, kl_GL: kl-GL, ta_IN: ta-IN, fy_NL: fy-NL, sah_RU: sah-RU, sl_SI: sl-SI, ar_IQ: ar-IQ, ar_QA: ar-QA, es_PE: es-PE, mt_MT: mt-MT, oc_FR: oc-FR, prs_AF: prs-AF, se_SE: se-SE, sma_NO: sma-NO, ar_MA: ar-MA, bs_BA: bs-BA, ca_ES: ca-ES, smn_FI: smn-FI, zh_SG: zh-SG, en_JM: en-JM, es_NI: es-NI, mr_IN: mr-IN, nb_NO: nb-NO, ro_RO: ro-RO, ar_LY: ar-LY, cs_CZ: cs-CZ, de_AT: de-AT, syr_SY: syr-SY, uz_UZ: uz-UZ, sq_AL: sq-AL, sv_FI: sv-FI, hr_BA: hr-BA, kn_IN: kn-IN, sk_SK: sk-SK, en_MY: en-MY, en_SG: en-SG, fo_FO: fo-FO, iu_CA: iu-CA, nn_NO: nn-NO, ar_SY: ar-SY, bn_IN: bn-IN, cy_GB: cy-GB, ug_CN: ug-CN, es_CO: es-CO, hsb_DE: hsb-DE, quz_BO: quz-BO, ar_JO: ar-JO, ii_CN: ii-CN, rm_CH: rm-CH, lb_LU: lb-LU, rw_RW: rw-RW, sr_BA: sr-BA, tg_TJ: tg-TJ, af_ZA: af-ZA, co_FR: co-FR, fa_IR: fa-IR, es_CL: es-CL, id_ID: id-ID, ky_KG: ky-KG, pa_IN: pa-IN, si_LK: si-LK, ar_DZ: ar-DZ, ar_KW: ar-KW, dv_MV: dv-MV, zh_MO: zh-MO, zu_ZA: zu-ZA, es_PY: es-PY, ig_NG: ig-NG, it_CH: it-CH, lo_LA: lo-LA, tk_TM: tk-TM, as_IN: as-IN, es_MX: es-MX, es_PR: es-PR, ja_JP: ja-JP, nso_ZA: nso-ZA, th_TH: th-TH, ur_PK: ur-PK, bn_BD: bn-BD, dsb_DE: dsb-DE, es_US: es-US, or_IN: or-IN, ar_YE: ar-YE, da_DK: da-DK, fr_CA: fr-CA, es_HN: es-HN, fr_MC: fr-MC, kk_KZ: kk-KZ, mn_MN: mn-MN, ne_NP: ne-NP, ar_LB: ar-LB, bo_CN: bo-CN, en_ZW: en-ZW, sr_ME: sr-ME, zh_CN: zh-CN, fr_LU: fr-LU, ms_MY: ms-MY, pl_PL: pl-PL, smj_NO: smj-NO, yo_NG: yo-NG, en_IE: en-IE, es_BO: es-BO, es_UY: es-UY, ha_NG: ha-NG, nl_BE: nl-BE, quz_EC: quz-EC, br_FR: br-FR, en_GB: en-GB, eu_ES: eu-ES, hi_IN: hi-IN, xh_ZA: xh-ZA, sr_CS: sr-CS, sr_RS: sr-RS, tn_ZA: tn-ZA, ar_SA: ar-SA, fr_BE: fr-BE, ms_BN: ms-BN, en_IN: en-IN, es_AR: es-AR, es_CR: es-CR, es_VE: es-VE, km_KH: km-KH, arn_CL: arn-CL, ba_RU: ba-RU, de_LI: de-LI, se_NO: se-NO, ar_EG: ar-EG, fil_PH: fil-PH, moh_CA: moh-CA, az_AZ: az-AZ, es_ES: es-ES, ka_GE: ka-GE, hr_HR: hr-HR, mk_MK: mk-MK, nl_NL: nl-NL, sma_SE: sma-SE, smj_SE: smj-SE, ar_AE: ar-AE, el_GR: el-GR, gu_IN: gu-IN, uk_UA: uk-UA, kok_IN: kok-IN, te_IN: te-IN, de_LU: de-LU, en_NZ: en-NZ, fr_FR: fr-FR, es_SV: es-SV, he_IL: he-IL, ps_AF: ps-AF, ga_IE: ga-IE, wo_SN: wo-SN, bg_BG: bg-BG, de_CH: de-CH, en_US: en-US 9 | 10 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | PRIMARY AUTHORS 2 | 3 | Rocky Duan 4 | Arjun Singh 5 | Kevin Chugh 6 | Ibrahim Awwal 7 | 8 | CONTRIBUTORS 9 | 10 | Mike Chen 11 | David Ormsbee 12 | Matthew Mongeau 13 | Christina Roberts 14 | Calen Pennington 15 | Ed Zarecor 16 | Jay Zoldak 17 | Jim Abramson 18 | Greg Price 19 | Sarina Canelake 20 | Alexandre Dubus 21 | Alan Boudreault 22 | Matjaz Gregoric 23 | Ben McMorran 24 | Bill DeRusha 25 | Brian Beggs 26 | Clinton Blackburn 27 | Awais Jibran 28 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Change Log 2 | ---------- 3 | 4 | These are notable changes in cs_comments_service. 
This is a rolling list of changes, 5 | in roughly chronological order, most recent first. Add your entries at or near 6 | the top. Include a label indicating the component affected. 7 | 8 | **app.rb:** Return the correct Content-Type, application/json. 9 | 10 | **api:** Add the ability to filter by commentable id to more endpoints 11 | (in particular, /threads). 12 | 13 | **models:** Added a new sorting key and index to `Comment` documents, removing the need 14 | for certain hierarchical db queries. Also added a copy of the author's username 15 | to `Comment` and `CommentThread` models, to reduce the number of db queries. 16 | IMPORTANT: 17 | - these changes require a data backpopulation to be run BEFORE deploying 18 | updated code. The backpopulation script is located at 19 | scripts/db/migrate-001-sk-author_username.js 20 | and should be run directly against your MongoDB instance. 21 | - If your application is online while the migration script is running, you should run 22 | the script a second time after deploying the application. This is to backpopulate any 23 | new content that may have been created between the first run and the application update. 24 | 25 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | ruby ">= 3.1" 3 | 4 | gem 'pry' 5 | gem 'pry-nav' 6 | 7 | # Use with command-line debugging, but not RubyMine 8 | #gem 'debugger' 9 | 10 | gem 'bundler' 11 | 12 | gem 'rake' 13 | 14 | gem 'sinatra' 15 | gem 'sinatra-param', '~> 1.4' 16 | 17 | gem 'yajl-ruby', '~> 1.3.1' 18 | 19 | gem 'activemodel' 20 | gem 'protected_attributes_continued' 21 | 22 | gem 'mongoid' 23 | gem 'bson' 24 | gem 'bson_ext' 25 | 26 | gem 'delayed_job' 27 | gem 'delayed_job_mongoid' 28 | 29 | gem "enumerize" 30 | gem 'mongoid-tree', :git => 'https://github.com/edx/mongoid-tree' 31 | gem 'rs_voteable_mongo', '~> 1.3' 32 | gem 'mongoid_magic_counter_cache' 33 | 34 | # Before updating the will_paginate version, we need to make sure that the property 'total_entries' 35 | # still exists; otherwise, use the updated property name to fetch the total collection count in lib/helpers.rb's 36 | # function 'handle_threads_query'. 
37 | gem 'will_paginate_mongoid', "~>2.0" 38 | gem 'rdiscount' 39 | 40 | gem 'elasticsearch', '~> 7.8.0' 41 | gem 'elasticsearch-model', '~> 7.1.0' 42 | 43 | gem 'dalli' 44 | 45 | gem 'rest-client' 46 | 47 | group :test do 48 | gem 'simplecov-cobertura', :require => false 49 | gem 'mongoid_cleaner', '~> 1.2.0' 50 | gem 'factory_bot' 51 | gem 'faker' 52 | gem 'guard' 53 | gem 'guard-unicorn' 54 | gem 'rack-test', :require => 'rack/test' 55 | gem 'rspec' 56 | gem 'rspec-its' 57 | gem 'rspec-collection_matchers' 58 | gem 'webmock' 59 | end 60 | 61 | group 'newrelic_rpm' do 62 | gem 'newrelic_rpm' 63 | end 64 | 65 | gem 'unicorn' 66 | gem "rack-timeout" 67 | gem "i18n" 68 | gem "rack-contrib", :git => 'https://github.com/rack/rack-contrib.git', :ref => '6ff3ca2b2d988911ca52a2712f6a7da5e064aa27' 69 | 70 | 71 | gem "timecop", "~> 0.9.5" 72 | gem 'ddtrace', require: 'ddtrace/auto_instrument' 73 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | GIT 2 | remote: https://github.com/edx/mongoid-tree 3 | revision: 51384016e09a0623fe7b5511b6aff1435607ee0f 4 | specs: 5 | mongoid-tree (2.1.1) 6 | mongoid (>= 4.0, < 8) 7 | 8 | GIT 9 | remote: https://github.com/rack/rack-contrib.git 10 | revision: 6ff3ca2b2d988911ca52a2712f6a7da5e064aa27 11 | ref: 6ff3ca2b2d988911ca52a2712f6a7da5e064aa27 12 | specs: 13 | rack-contrib (1.2.0) 14 | rack (>= 0.9.1) 15 | 16 | GEM 17 | remote: https://rubygems.org/ 18 | specs: 19 | activemodel (7.0.8.1) 20 | activesupport (= 7.0.8.1) 21 | activesupport (7.0.8.1) 22 | concurrent-ruby (~> 1.0, >= 1.0.2) 23 | i18n (>= 1.6, < 2) 24 | minitest (>= 5.1) 25 | tzinfo (~> 2.0) 26 | addressable (2.8.6) 27 | public_suffix (>= 2.0.2, < 6.0) 28 | base64 (0.2.0) 29 | bigdecimal (3.1.6) 30 | bson (4.15.0) 31 | bson_ext (1.5.1) 32 | coderay (1.1.3) 33 | concurrent-ruby (1.2.3) 34 | crack (1.0.0) 35 | bigdecimal 36 | rexml 37 | dalli (3.2.8) 38 | datadog-ci (0.8.3) 39 | msgpack 40 | ddtrace (1.22.0) 41 | datadog-ci (~> 0.8.1) 42 | debase-ruby_core_source (= 3.3.1) 43 | libdatadog (~> 7.0.0.1.0) 44 | libddwaf (~> 1.14.0.0.0) 45 | msgpack 46 | debase-ruby_core_source (3.3.1) 47 | delayed_job (4.1.11) 48 | activesupport (>= 3.0, < 8.0) 49 | delayed_job_mongoid (3.0.0) 50 | delayed_job (>= 3.0, < 5) 51 | mongoid (>= 5.0) 52 | diff-lcs (1.5.1) 53 | docile (1.4.0) 54 | domain_name (0.6.20240107) 55 | elasticsearch (7.8.1) 56 | elasticsearch-api (= 7.8.1) 57 | elasticsearch-transport (= 7.8.1) 58 | elasticsearch-api (7.8.1) 59 | multi_json 60 | elasticsearch-model (7.1.1) 61 | activesupport (> 3) 62 | elasticsearch (> 1) 63 | hashie 64 | elasticsearch-transport (7.8.1) 65 | faraday (~> 1) 66 | multi_json 67 | enumerize (2.7.0) 68 | activesupport (>= 3.2) 69 | factory_bot (6.4.6) 70 | activesupport (>= 5.0.0) 71 | faker (3.2.3) 72 | i18n (>= 1.8.11, < 2) 73 | faraday (1.10.3) 74 | faraday-em_http (~> 1.0) 75 | faraday-em_synchrony (~> 1.0) 76 | faraday-excon (~> 1.1) 77 | faraday-httpclient (~> 1.0) 78 | faraday-multipart (~> 1.0) 79 | faraday-net_http (~> 1.0) 80 | faraday-net_http_persistent (~> 1.0) 81 | faraday-patron (~> 1.0) 82 | faraday-rack (~> 1.0) 83 | faraday-retry (~> 1.0) 84 | ruby2_keywords (>= 0.0.4) 85 | faraday-em_http (1.0.0) 86 | faraday-em_synchrony (1.0.0) 87 | faraday-excon (1.1.0) 88 | faraday-httpclient (1.0.1) 89 | faraday-multipart (1.0.4) 90 | multipart-post (~> 2) 91 | faraday-net_http (1.0.1) 92 | faraday-net_http_persistent (1.2.0) 93 | 
faraday-patron (1.0.0) 94 | faraday-rack (1.0.0) 95 | faraday-retry (1.0.3) 96 | ffi (1.16.3) 97 | formatador (1.1.0) 98 | guard (2.18.1) 99 | formatador (>= 0.2.4) 100 | listen (>= 2.7, < 4.0) 101 | lumberjack (>= 1.0.12, < 2.0) 102 | nenv (~> 0.1) 103 | notiffany (~> 0.0) 104 | pry (>= 0.13.0) 105 | shellany (~> 0.0) 106 | thor (>= 0.18.1) 107 | guard-unicorn (0.2.0) 108 | guard (>= 1.1) 109 | hashdiff (1.1.0) 110 | hashie (5.0.0) 111 | http-accept (1.7.0) 112 | http-cookie (1.0.5) 113 | domain_name (~> 0.5) 114 | i18n (1.14.1) 115 | concurrent-ruby (~> 1.0) 116 | kgio (2.11.4) 117 | libdatadog (7.0.0.1.0) 118 | libddwaf (1.14.0.0.0) 119 | ffi (~> 1.0) 120 | listen (3.8.0) 121 | rb-fsevent (~> 0.10, >= 0.10.3) 122 | rb-inotify (~> 0.9, >= 0.9.10) 123 | lumberjack (1.2.10) 124 | method_source (1.0.0) 125 | mime-types (3.5.2) 126 | mime-types-data (~> 3.2015) 127 | mime-types-data (3.2024.0206) 128 | minitest (5.22.2) 129 | mongo (2.19.3) 130 | bson (>= 4.14.1, < 5.0.0) 131 | mongoid (7.5.4) 132 | activemodel (>= 5.1, < 7.1, != 7.0.0) 133 | mongo (>= 2.10.5, < 3.0.0) 134 | ruby2_keywords (~> 0.0.5) 135 | mongoid_cleaner (1.2.0) 136 | mongoid (>= 4.0) 137 | mongoid_magic_counter_cache (1.1.1) 138 | mongoid 139 | rake 140 | msgpack (1.7.2) 141 | multi_json (1.15.0) 142 | multipart-post (2.4.0) 143 | mustermann (3.0.0) 144 | ruby2_keywords (~> 0.0.1) 145 | nenv (0.3.0) 146 | netrc (0.11.0) 147 | newrelic_rpm (9.7.1) 148 | notiffany (0.1.3) 149 | nenv (~> 0.1) 150 | shellany (~> 0.0) 151 | protected_attributes_continued (1.9.0) 152 | activemodel (>= 5.0) 153 | pry (0.14.2) 154 | coderay (~> 1.1) 155 | method_source (~> 1.0) 156 | pry-nav (1.0.0) 157 | pry (>= 0.9.10, < 0.15) 158 | public_suffix (5.0.4) 159 | rack (3.0.9.1) 160 | rack-protection (4.0.0) 161 | base64 (>= 0.1.0) 162 | rack (>= 3.0.0, < 4) 163 | rack-session (2.0.0) 164 | rack (>= 3.0.0) 165 | rack-test (2.1.0) 166 | rack (>= 1.3) 167 | rack-timeout (0.6.3) 168 | raindrops (0.20.1) 169 | rake (13.1.0) 170 | rb-fsevent (0.11.2) 171 | rb-inotify (0.10.1) 172 | ffi (~> 1.0) 173 | rdiscount (2.2.7.3) 174 | rest-client (2.1.0) 175 | http-accept (>= 1.7.0, < 2.0) 176 | http-cookie (>= 1.0.2, < 2.0) 177 | mime-types (>= 1.16, < 4.0) 178 | netrc (~> 0.8) 179 | rexml (3.2.6) 180 | rs_voteable_mongo (1.3.0) 181 | mongoid (~> 7.0) 182 | rspec (3.13.0) 183 | rspec-core (~> 3.13.0) 184 | rspec-expectations (~> 3.13.0) 185 | rspec-mocks (~> 3.13.0) 186 | rspec-collection_matchers (1.2.1) 187 | rspec-expectations (>= 2.99.0.beta1) 188 | rspec-core (3.13.0) 189 | rspec-support (~> 3.13.0) 190 | rspec-expectations (3.13.0) 191 | diff-lcs (>= 1.2.0, < 2.0) 192 | rspec-support (~> 3.13.0) 193 | rspec-its (1.3.0) 194 | rspec-core (>= 3.0.0) 195 | rspec-expectations (>= 3.0.0) 196 | rspec-mocks (3.13.0) 197 | diff-lcs (>= 1.2.0, < 2.0) 198 | rspec-support (~> 3.13.0) 199 | rspec-support (3.13.0) 200 | ruby2_keywords (0.0.5) 201 | shellany (0.0.1) 202 | simplecov (0.22.0) 203 | docile (~> 1.1) 204 | simplecov-html (~> 0.11) 205 | simplecov_json_formatter (~> 0.1) 206 | simplecov-cobertura (2.1.0) 207 | rexml 208 | simplecov (~> 0.19) 209 | simplecov-html (0.12.3) 210 | simplecov_json_formatter (0.1.4) 211 | sinatra (4.0.0) 212 | mustermann (~> 3.0) 213 | rack (>= 3.0.0, < 4) 214 | rack-protection (= 4.0.0) 215 | rack-session (>= 2.0.0, < 3) 216 | tilt (~> 2.0) 217 | sinatra-param (1.6.0) 218 | sinatra (>= 1.3) 219 | thor (1.3.0) 220 | tilt (2.3.0) 221 | timecop (0.9.8) 222 | tzinfo (2.0.6) 223 | concurrent-ruby (~> 1.0) 224 | unicorn (6.1.0) 225 | kgio 
(~> 2.6) 226 | raindrops (~> 0.7) 227 | webmock (3.22.0) 228 | addressable (>= 2.8.0) 229 | crack (>= 0.3.2) 230 | hashdiff (>= 0.4.0, < 2.0.0) 231 | will_paginate (3.3.1) 232 | will_paginate_mongoid (2.0.1) 233 | mongoid 234 | will_paginate (~> 3.0) 235 | yajl-ruby (1.3.1) 236 | 237 | PLATFORMS 238 | ruby 239 | 240 | DEPENDENCIES 241 | activemodel 242 | bson 243 | bson_ext 244 | bundler 245 | dalli 246 | ddtrace 247 | delayed_job 248 | delayed_job_mongoid 249 | elasticsearch (~> 7.8.0) 250 | elasticsearch-model (~> 7.1.0) 251 | enumerize 252 | factory_bot 253 | faker 254 | guard 255 | guard-unicorn 256 | i18n 257 | mongoid 258 | mongoid-tree! 259 | mongoid_cleaner (~> 1.2.0) 260 | mongoid_magic_counter_cache 261 | newrelic_rpm 262 | protected_attributes_continued 263 | pry 264 | pry-nav 265 | rack-contrib! 266 | rack-test 267 | rack-timeout 268 | rake 269 | rdiscount 270 | rest-client 271 | rs_voteable_mongo (~> 1.3) 272 | rspec 273 | rspec-collection_matchers 274 | rspec-its 275 | simplecov-cobertura 276 | sinatra 277 | sinatra-param (~> 1.4) 278 | timecop (~> 0.9.5) 279 | unicorn 280 | webmock 281 | will_paginate_mongoid (~> 2.0) 282 | yajl-ruby (~> 1.3.1) 283 | 284 | RUBY VERSION 285 | ruby 3.1.4p223 286 | 287 | BUNDLED WITH 288 | 2.3.26 289 | -------------------------------------------------------------------------------- /Gemfile3: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | ruby "~> 3.0" 3 | 4 | gem 'pry' 5 | gem 'pry-nav' 6 | 7 | # Use with command-line debugging, but not RubyMine 8 | #gem 'debugger' 9 | 10 | gem 'bundler' 11 | 12 | gem 'rake' 13 | 14 | gem 'sinatra' 15 | gem 'sinatra-param', '~> 1.4' 16 | 17 | gem 'yajl-ruby', '~> 1.3.1' 18 | 19 | gem 'activemodel' 20 | gem 'protected_attributes_continued' 21 | 22 | gem 'mongoid' 23 | gem 'bson' 24 | gem 'bson_ext' 25 | 26 | gem 'delayed_job' 27 | gem 'delayed_job_mongoid' 28 | 29 | gem "enumerize" 30 | gem 'mongoid-tree', :git => 'https://github.com/edx/mongoid-tree' 31 | gem 'rs_voteable_mongo', '~> 1.3' 32 | gem 'mongoid_magic_counter_cache' 33 | 34 | # Before updating the will_paginate version, we need to make sure that the property 'total_entries' 35 | # still exists; otherwise, use the updated property name to fetch the total collection count in lib/helpers.rb's 36 | # function 'handle_threads_query'. 
37 | gem 'will_paginate_mongoid', "~>2.0" 38 | gem 'rdiscount' 39 | gem 'nokogiri', "~> 1.8.1" 40 | 41 | gem 'elasticsearch', '~> 7.8.0' 42 | gem 'elasticsearch-model', '~> 7.1.0' 43 | 44 | gem 'dalli' 45 | 46 | gem 'rest-client' 47 | 48 | group :test do 49 | gem 'simplecov-cobertura', :require => false 50 | gem 'mongoid_cleaner', '~> 1.2.0' 51 | gem 'factory_bot' 52 | gem 'faker' 53 | gem 'guard' 54 | gem 'guard-unicorn' 55 | gem 'rack-test', :require => 'rack/test' 56 | gem 'rspec' 57 | gem 'rspec-its' 58 | gem 'rspec-collection_matchers' 59 | gem 'webmock' 60 | end 61 | 62 | group 'newrelic_rpm' do 63 | gem 'newrelic_rpm' 64 | end 65 | 66 | gem 'unicorn' 67 | gem "rack-timeout" 68 | gem "i18n" 69 | gem "rack-contrib", :git => 'https://github.com/rack/rack-contrib.git', :ref => '6ff3ca2b2d988911ca52a2712f6a7da5e064aa27' 70 | 71 | 72 | gem "timecop", "~> 0.9.5" 73 | gem 'ddtrace', require: 'ddtrace/auto_instrument' 74 | -------------------------------------------------------------------------------- /Guardfile: -------------------------------------------------------------------------------- 1 | # A sample Guardfile 2 | # More info at https://github.com/guard/guard#readme 3 | 4 | ## Sample template for guard-unicorn 5 | # 6 | # Usage: 7 | # guard :unicorn, 8 | # 9 | # Possible options: 10 | # * :daemonize (default is true) - should the Unicorn server start daemonized? 11 | # * :config_file (default is "config/unicorn.rb") - the path to the unicorn file 12 | # * :pid_file (default is "tmp/pids/unicorn.pid") - the path to the unicorn pid file 13 | guard :unicorn, :daemonize => false, :port => 4567 do 14 | watch('app.rb') 15 | watch(%|api/.*\.rb|) 16 | watch(%|lib/.*\.rb|) 17 | watch(%|models/.*\.rb|) 18 | watch(%|config/.*\.rb|) 19 | end 20 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: bundle exec unicorn -p $PORT -c ./config/unicorn.heroku.rb 2 | 3 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Part of `edX code`__. 2 | 3 | __ http://code.edx.org/ 4 | 5 | edX Comments Service/Forums |Build|_ |Codecov|_ 6 | ================================================== 7 | .. |Build| image:: https://github.com/openedx/cs_comments_service/workflows/RUBY%20CI/badge.svg?branch=master 8 | .. _Build: https://github.com/openedx/cs_comments_service/actions?query=workflow%3A%22RUBY+CI%22 9 | 10 | .. |Codecov| image:: http://codecov.io/github/edx/cs_comments_service/coverage.svg?branch=master 11 | .. _Codecov: http://codecov.io/github/edx/cs_comments_service?branch=master 12 | 13 | An independent comment system which supports voting and nested comments. It 14 | also supports features including instructor endorsement for education-aimed 15 | discussion platforms. 16 | 17 | Getting Started 18 | --------------- 19 | If you are running cs_comments_service as part of edx-platform__ development under 20 | devstack, it is strongly recommended to read `those setup documents`__ first. Note that 21 | devstack will take care of just about all of the installation, configuration, and 22 | service management on your behalf. If running outside of devstack, continue reading below. 
23 | 24 | __ https://github.com/openedx/edx-platform 25 | __ https://github.com/openedx/configuration/wiki/edX-Developer-Stack 26 | 27 | This service relies on Elasticsearch and MongoDB. By default the service will use the Elasticsearch server available at 28 | `http://localhost:9200` and the MongoDB server available at `localhost:27017`. This is suitable for local development; 29 | however, if you wish to change these values, refer to `config/application.yml` and `config/mongoid.yml` for the 30 | environment variables that can be set to override the defaults. 31 | 32 | Install the requisite gems: 33 | 34 | .. code-block:: bash 35 | 36 | $ bundle install 37 | 38 | To initialize indices: 39 | 40 | Set up the search indices. Note that the command below creates `comments_20161220185820323` and 41 | `comment_threads_20161220185820323` indices and assigns `comments` and `comment_threads` aliases. This will enable you 42 | to swap out indices (e.g. rebuild_index) without having to take downtime or modify code with a new index name. 43 | 44 | .. code-block:: bash 45 | 46 | $ bin/rake search:initialize 47 | 48 | To validate indices exist and contain the proper mappings: 49 | 50 | .. code-block:: bash 51 | 52 | $ bin/rake search:validate_indices 53 | 54 | To rebuild indices: 55 | 56 | To rebuild new indices from the database and then point the aliases `comments` and `comment_threads` to each index 57 | which has the equivalent index prefix, you can use the rebuild_indices task. This task will also run catch-up before 58 | and after aliases are moved, to minimize the time where aliases do not contain all documents. 59 | 60 | .. code-block:: bash 61 | 62 | $ bin/rake search:rebuild_indices 63 | 64 | You can also adjust the batch size (e.g. 200) and the sleep time (e.g. 2 seconds) between batches to lighten the load 65 | on MongoDB. 66 | 67 | .. code-block:: bash 68 | 69 | $ bin/rake search:rebuild_indices[200,2] 70 | 71 | Run the server: 72 | 73 | .. code-block:: 74 | 75 | $ ruby app.rb 76 | 77 | By default Sinatra runs on port `4567`. If you'd like to use a different port, pass the `-p` parameter: 78 | 79 | .. code-block:: 80 | 81 | $ ruby app.rb -p 5678 82 | 83 | Rack timeout configuration should be set as an environment variable; the default value is 15 seconds. To set it to 20 seconds: 84 | 85 | .. code-block:: 86 | 87 | $ RACK_TIMEOUT_SERVICE_TIMEOUT=20 ruby app.rb -p 5678 88 | 89 | Running Tests 90 | ------------- 91 | Tests are built using the rspec__ framework, and can be run with the command below: 92 | 93 | .. code-block:: 94 | 95 | $ bin/rspec 96 | 97 | If you'd like to view additional options for the command, append the `--help` option: 98 | 99 | .. code-block:: 100 | 101 | $ bin/rspec --help 102 | 103 | __ http://rspec.info/ 104 | 105 | 106 | Running Tests with Docker 107 | ------------------------- 108 | You can also use docker-compose to run your tests as follows (assuming you have 109 | docker-compose installed): 110 | 111 | .. code-block:: 112 | 113 | $ docker-compose -f .github/docker-compose-ci.yml run --rm test-forum 114 | 115 | To debug the tests using docker-compose, first start up the containers: 116 | 117 | .. code-block:: 118 | 119 | $ # Note: Ignore errors creating forum_testing container after it was already started 120 | $ docker-compose -f .github/docker-compose-ci.yml up 121 | 122 | Next, shell into the container: 123 | 124 | .. code-block:: 125 | 126 | $ docker exec -it forum_testing bash 127 | 128 | Finally, from inside the container, start the tests: 129 | 130 | .. 
code-block:: 131 | 132 | $ cd /edx/app/forum/cs_comments_service/ 133 | $ .github/run_tests.sh 134 | 135 | Tips: 136 | 137 | * After running for the first time, you can speed up ``run_tests.sh`` by commenting out ``bundle install`` and ``sleep 10``, which is only needed the first time. 138 | * Add ``binding.pry`` in code anywhere you want a breakpoint to start debugging. 139 | 140 | Internationalization (i18n) and Localization (l10n) 141 | --------------------------------------------------- 142 | 143 | To run the comments service in a language other than English, set the 144 | ``SERVICE_LANGUAGE`` environment variable to the `language code` for the 145 | desired language. Its default value is en-US. 146 | 147 | Setting the language has no effect on user content stored by the service. 148 | However, there are a few data validation messages that may be seen by end 149 | users via the frontend in edx-platform__. These will be 150 | translated to ``SERVICE_LANGUAGE`` assuming a suitable translation file is 151 | found in the locale/ directory. 152 | 153 | __ https://github.com/openedx/edx-platform 154 | 155 | edX uses Transifex to host translations. To use the Transifex client, be sure 156 | it is installed (``pip install transifex-client`` will do this for you), and 157 | follow the instructions here__ to set up your ``.transifexrc`` file. 158 | 159 | __ http://support.transifex.com/customer/portal/articles/1000855-configuring-the-client 160 | 161 | To upload strings to Transifex for translation when you change the set 162 | of translatable strings: ``bin/rake i18n:push`` 163 | 164 | To fetch the latest translations from Transifex: ``bin/rake i18n:pull`` 165 | 166 | The repository includes some translations so they will be available 167 | upon deployment. To commit an update to these: ``bin/rake i18n:commit`` 168 | 169 | License 170 | ------- 171 | 172 | The code in this repository is licensed under version 3 of the AGPL unless 173 | otherwise noted. 174 | 175 | Please see ``LICENSE.txt`` for details. 176 | 177 | How to Contribute 178 | ----------------- 179 | 180 | Contributions are very welcome. The easiest way is to fork this repo, and then 181 | make a pull request from your fork. The first time you make a pull request, you 182 | may be asked to sign a Contributor Agreement. 183 | 184 | Reporting Security Issues 185 | ------------------------- 186 | 187 | Please do not report security issues in public. Please email security@openedx.org 188 | 189 | Mailing List and IRC Channel 190 | ---------------------------- 191 | 192 | You can discuss this code on the `edx-code Google Group`__ or in the 193 | ``edx-code`` IRC channel on Freenode. 194 | 195 | __ https://groups.google.com/forum/#!forum/edx-code 196 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'bundler' 3 | 4 | Bundler.setup 5 | Bundler.require 6 | 7 | begin 8 | require 'rspec/core/rake_task' 9 | 10 | RSpec::Core::RakeTask.new(:spec) 11 | 12 | # No default included because by default, running rspec/tests clears the database, which is bad on production. 13 | rescue LoadError 14 | # no rspec available 15 | end 16 | 17 | LOG = Logger.new(STDERR) 18 | 19 | desc 'Load the environment' 20 | task :environment do 21 | # Load all of app.rb to keep rake and the app as similar as possible. 
22 | # Without this, we had run into bugs where certain overriding fixes in app.rb 23 | # were not used from the rake tasks. 24 | require File.dirname(__FILE__) + '/app.rb' 25 | end 26 | 27 | task :console => :environment do 28 | binding.pry 29 | end 30 | 31 | Dir.glob('lib/tasks/*.rake').each { |r| import r } 32 | -------------------------------------------------------------------------------- /api/comment_threads.rb: -------------------------------------------------------------------------------- 1 | 2 | get "#{APIPREFIX}/threads" do # retrieve threads by course 3 | # "sort_key" parameter will change order of threads returned and so may not always return in order 4 | # of most comments to least number of comments. 5 | 6 | # Note also that sorting sorts the pinned threads first and is not handled by elasticsearch but rather as a 7 | # part of the mongo query done once the thread IDs have been retrieved from ES. 8 | threads = CommentThread.where({"course_id" => params["course_id"]}) 9 | if params[:commentable_ids] 10 | threads = threads.in({"commentable_id" => params[:commentable_ids].split(",")}) 11 | end 12 | 13 | handle_threads_query( 14 | threads, 15 | params["user_id"], 16 | params["course_id"], 17 | get_group_ids_from_params(params), 18 | params["author_id"], 19 | params["thread_type"], 20 | value_to_boolean(params["flagged"]), 21 | value_to_boolean(params["unread"]), 22 | value_to_boolean(params["unanswered"]), 23 | value_to_boolean(params["unresponded"]), 24 | value_to_boolean(params["count_flagged"]), 25 | params["sort_key"], 26 | params["page"], 27 | params["per_page"] 28 | ).to_json 29 | end 30 | 31 | get "#{APIPREFIX}/threads/:thread_id" do |thread_id| 32 | begin 33 | thread = CommentThread.find(thread_id) 34 | rescue Mongoid::Errors::DocumentNotFound 35 | error 404, [t(:requested_object_not_found)].to_json 36 | end 37 | 38 | merge_question_type_responses = value_to_boolean(params["merge_question_type_responses"]) 39 | 40 | # user is required to return user-specific fields, such as "read" (even if bool_mark_as_read is False) 41 | if params["user_id"] 42 | user = User.only([:id, :username, :read_states]).find_by(external_id: params["user_id"]) 43 | end 44 | if user and bool_mark_as_read 45 | user.mark_as_read(thread) 46 | end 47 | 48 | presenter = ThreadPresenter.factory(thread, user || nil) 49 | if params.has_key?("resp_skip") 50 | unless (resp_skip = Integer(params["resp_skip"]) rescue nil) && resp_skip >= 0 51 | error 400, [t(:param_must_be_a_non_negative_number, :param => 'resp_skip')].to_json 52 | end 53 | else 54 | resp_skip = 0 55 | end 56 | if params["resp_limit"] 57 | unless (resp_limit = Integer(params["resp_limit"]) rescue nil) && resp_limit >= 0 58 | error 400, [t(:param_must_be_a_number_greater_than_zero, :param => 'resp_limit')].to_json 59 | end 60 | else 61 | resp_limit = CommentService.config["thread_response_default_size"] 62 | end 63 | size_limit = CommentService.config["thread_response_size_limit"] 64 | unless (resp_limit <= size_limit) 65 | error 400, [t(:param_exceeds_limit, :param => resp_limit, :limit => size_limit)].to_json 66 | end 67 | presenter.to_hash( 68 | bool_with_responses, resp_skip, resp_limit, bool_recursive, bool_flagged_comments, bool_reverse_order, 69 | merge_question_type_responses 70 | ).to_json 71 | end 72 | 73 | put "#{APIPREFIX}/threads/:thread_id" do |thread_id| 74 | filter_blocked_content params["body"] 75 | updated_content = params.slice(*%w[title body pinned closed commentable_id group_id thread_type close_reason_code]) 76 | # If a 
close reason code is provided, save it. If a thread is being reopened, clear the closed_by flag 77 | if params[:closing_user_id] 78 | if updated_content.has_key? CLOSED 79 | if value_to_boolean(updated_content[CLOSED]) 80 | updated_content["closed_by"] = User.find_by(external_id: params[:closing_user_id]) 81 | else 82 | updated_content["closed_by"] = nil 83 | updated_content["close_reason_code"] = nil 84 | end 85 | end 86 | end 87 | if params[:editing_user_id] 88 | if updated_content.has_key? BODY and updated_content[BODY] != thread.body 89 | editor = User.find_by(external_id: params[:editing_user_id]) 90 | edit_reason_code = params.fetch("edit_reason_code", nil) 91 | thread.edit_history.build( 92 | original_body: thread.body, 93 | author: editor, 94 | reason_code: edit_reason_code, 95 | editor_username: editor.username, 96 | ) 97 | end 98 | end 99 | thread.update_attributes(updated_content) 100 | 101 | if thread.errors.any? 102 | error 400, thread.errors.full_messages.to_json 103 | else 104 | presenter = ThreadPresenter.factory(thread, nil) 105 | presenter.to_hash.to_json 106 | end 107 | end 108 | 109 | post "#{APIPREFIX}/threads/:thread_id/comments" do |thread_id| 110 | filter_blocked_content params["body"] 111 | comment = Comment.new(params.slice(*%w[body course_id])) 112 | comment.anonymous = bool_anonymous || false 113 | comment.anonymous_to_peers = bool_anonymous_to_peers || false 114 | comment.author = user 115 | comment.comment_thread = thread 116 | comment.child_count = 0 117 | comment.save 118 | if comment.errors.any? 119 | error 400, comment.errors.full_messages.to_json 120 | else 121 | user.subscribe(thread) if bool_auto_subscribe 122 | # Mark thread as read for owner user on comment creation 123 | user.mark_as_read(thread) 124 | comment.to_hash.to_json 125 | end 126 | end 127 | 128 | delete "#{APIPREFIX}/threads/:thread_id" do |thread_id| 129 | thread.destroy 130 | thread.to_hash.to_json 131 | end 132 | -------------------------------------------------------------------------------- /api/commentables.rb: -------------------------------------------------------------------------------- 1 | delete "#{APIPREFIX}/:commentable_id/threads" do |commentable_id| 2 | commentable.comment_threads.destroy_all 3 | {}.to_json 4 | end 5 | 6 | get "#{APIPREFIX}/:commentable_id/threads" do |commentable_id| 7 | threads = Content.where({"_type" => "CommentThread", "commentable_id" => commentable_id}) 8 | if params["course_id"] 9 | threads = threads.where({"course_id" => params["course_id"]}) 10 | end 11 | 12 | handle_threads_query( 13 | threads, 14 | params["user_id"], 15 | params["course_id"], 16 | get_group_ids_from_params(params), 17 | params["author_id"], 18 | params["thread_type"], 19 | value_to_boolean(params["flagged"]), 20 | value_to_boolean(params["unread"]), 21 | value_to_boolean(params["unanswered"]), 22 | value_to_boolean(params["unresponded"]), 23 | value_to_boolean(params["count_flagged"]), 24 | params["sort_key"], 25 | params["page"], 26 | params["per_page"], 27 | params["context"] ? 
params["context"] : :course 28 | ).to_json 29 | end 30 | 31 | get "#{APIPREFIX}/commentables/:course_id/counts" do |course_id| 32 | commentable_counts = {} 33 | Content.collection.aggregate( 34 | [ 35 | # Match all threads in the course 36 | { "$match" => { :course_id => course_id, :_type => "CommentThread" } }, 37 | # Group all the threads in the course by the type of thread and the topic of the thread 38 | # (represented by commentable_id) and keep a count of each 39 | { 40 | "$group" => { 41 | :_id => { :topic_id => "$commentable_id", :type => "$thread_type" }, 42 | :count => { "$sum" => 1 }, 43 | } 44 | } 45 | ]).each do |commentable| 46 | # The data returned by mongo is structured as rows mapping a topic id and thread type pair with a count 47 | # here we convert that to a map of topic id to thread counts of each type. 48 | topic_id = commentable[:_id][:topic_id] 49 | unless commentable_counts.has_key? topic_id 50 | commentable_counts[topic_id] = { :discussion => 0, :question => 0 } 51 | end 52 | commentable_counts[topic_id].merge! commentable[:_id][:type] => commentable["count"] 53 | end 54 | commentable_counts.to_json 55 | end 56 | 57 | post "#{APIPREFIX}/:commentable_id/threads" do |commentable_id| 58 | filter_blocked_content params["body"] 59 | thread = CommentThread.new(params.slice(*%w[title body course_id ]).merge(commentable_id: commentable_id)) 60 | thread.thread_type = params["thread_type"] || :discussion 61 | thread.anonymous = bool_anonymous || false 62 | thread.anonymous_to_peers = bool_anonymous_to_peers || false 63 | 64 | if params["group_id"] 65 | thread.group_id = params["group_id"] 66 | end 67 | 68 | if params["context"] 69 | thread.context = params["context"] 70 | end 71 | 72 | thread.author = user 73 | thread.save 74 | 75 | if thread.errors.any? 76 | error 400, thread.errors.full_messages.to_json 77 | else 78 | # Mark thread as read for owner user on creation 79 | user.mark_as_read(thread) 80 | user.subscribe(thread) if bool_auto_subscribe 81 | 82 | # Initialize ThreadPresenter; if non-null user is passed it also calculates 83 | # user specific data on initialization such as thread "read" status 84 | presenter = ThreadPresenter.factory(thread, user) 85 | thread = presenter.to_hash 86 | thread["resp_total"] = 0 87 | thread.to_json 88 | end 89 | end 90 | -------------------------------------------------------------------------------- /api/comments.rb: -------------------------------------------------------------------------------- 1 | get "#{APIPREFIX}/comments" do 2 | # The `user_id` is passed via a query string, as opposed to making 3 | # comments a sub-resource of a user-specific endpoint, so that this 4 | # endpoint is decoupled from the specific use case of getting comments 5 | # from an user, and can be extended, in the future, with alternative 6 | # methods of filtering. 
7 | 8 | # handle filters 9 | error 400, "user_id is required" unless params['user_id'] 10 | query = Comment.where( 11 | author_id: params['user_id'], 12 | anonymous: false, 13 | anonymous_to_peers: false, 14 | ) 15 | 16 | error 400, "course_id is required" unless params['course_id'] 17 | query = query.where(course_id: params['course_id']) 18 | 19 | if value_to_boolean(params["flagged"]) 20 | query = query.where(:abuse_flaggers.nin => [nil, []]) 21 | end 22 | 23 | if params["order_by"] and params["order_by"] == "flagged" 24 | query = query.order(:abuse_flaggers.desc) 25 | end 26 | 27 | query = query.order(:created_at.desc) 28 | 29 | # handle pagination 30 | page = (params['page'] || DEFAULT_PAGE).to_i 31 | per_page = (params['per_page'] || DEFAULT_PER_PAGE).to_i 32 | paginated_collection = query.paginate(:page => page, :per_page => per_page) 33 | comment_count = paginated_collection.total_entries 34 | 35 | { 36 | collection: paginated_collection.map { |c| c.to_hash }, 37 | comment_count: comment_count, 38 | num_pages: [1, (comment_count / per_page.to_f).ceil].max, 39 | page: page, 40 | }.to_json 41 | end 42 | 43 | get "#{APIPREFIX}/comments/:comment_id" do |comment_id| 44 | @comment = comment 45 | comment_hash = @comment.to_hash(recursive: bool_recursive) 46 | verify_or_fix_cached_comment_count(@comment, comment_hash) 47 | comment_hash.to_json 48 | end 49 | 50 | put "#{APIPREFIX}/comments/:comment_id" do |comment_id| 51 | filter_blocked_content params["body"] 52 | updated_content = params.slice(*%w[body endorsed]) 53 | if params.has_key?("endorsed") 54 | new_endorsed_val = Mongoid::Boolean.mongoize(params["endorsed"]) 55 | if new_endorsed_val != comment.endorsed 56 | if params["endorsement_user_id"].nil? 57 | endorsement = nil 58 | else 59 | endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now} 60 | end 61 | updated_content["endorsement"] = new_endorsed_val ? endorsement : nil 62 | end 63 | end 64 | if params[:editing_user_id] 65 | if updated_content.has_key? BODY and updated_content[BODY] != comment.body 66 | edit_reason_code = params.fetch("edit_reason_code", nil) 67 | editor = User.find_by(external_id: params[:editing_user_id]) 68 | comment.edit_history.build( 69 | original_body: comment.body, 70 | author: editor, 71 | reason_code: edit_reason_code, 72 | editor_username: editor.username, 73 | ) 74 | end 75 | end 76 | comment.update_attributes(updated_content) 77 | if comment.errors.any? 78 | error 400, comment.errors.full_messages.to_json 79 | else 80 | comment.to_hash.to_json 81 | end 82 | end 83 | 84 | post "#{APIPREFIX}/comments/:comment_id" do |comment_id| 85 | filter_blocked_content params["body"] 86 | sub_comment = comment.children.new(params.slice(*%w[body course_id])) 87 | sub_comment.anonymous = bool_anonymous || false 88 | sub_comment.anonymous_to_peers = bool_anonymous_to_peers || false 89 | sub_comment.author = user 90 | sub_comment.comment_thread = comment.comment_thread 91 | sub_comment.child_count = 0 92 | sub_comment.save 93 | if sub_comment.errors.any? 94 | error 400, sub_comment.errors.full_messages.to_json 95 | else 96 | comment.update_cached_child_count 97 | if comment.errors.any? 
98 | error 400, comment.errors.full_messages.to_json 99 | else 100 | user.subscribe(comment.comment_thread) if bool_auto_subscribe 101 | # Mark thread as read for owner user on response creation 102 | user.mark_as_read(comment.comment_thread) 103 | sub_comment.to_hash.to_json 104 | end 105 | end 106 | end 107 | 108 | delete "#{APIPREFIX}/comments/:comment_id" do |comment_id| 109 | parent_id = comment.parent_id 110 | comment_as_json = comment.to_hash.to_json 111 | comment.destroy 112 | unless parent_id.nil? 113 | begin 114 | parent_comment = Comment.find(parent_id) 115 | parent_comment.update_cached_child_count 116 | rescue Mongoid::Errors::DocumentNotFound 117 | pass 118 | end 119 | end 120 | comment_as_json 121 | end 122 | -------------------------------------------------------------------------------- /api/flags.rb: -------------------------------------------------------------------------------- 1 | put "#{APIPREFIX}/threads/:thread_id/abuse_flag" do |thread_id| 2 | flag_as_abuse thread 3 | end 4 | 5 | put "#{APIPREFIX}/threads/:thread_id/abuse_unflag" do |thread_id| 6 | un_flag_as_abuse thread 7 | end 8 | 9 | put "#{APIPREFIX}/comments/:comment_id/abuse_flag" do |comment_id| 10 | flag_as_abuse comment 11 | end 12 | 13 | put "#{APIPREFIX}/comments/:comment_id/abuse_unflag" do |comment_id| 14 | un_flag_as_abuse comment 15 | end 16 | -------------------------------------------------------------------------------- /api/notifications.rb: -------------------------------------------------------------------------------- 1 | post "#{APIPREFIX}/notifications" do 2 | # get all notifications for a set of users and a range of dates 3 | # for example 4 | # http://localhost:4567/api/v1/notifications?api_key=PUT_YOUR_API_KEY_HERE 5 | # with POST params 6 | # user_ids=1217716,196353 7 | # from=2013-03-18+13%3A52%3A47+-0400 8 | # to=2013-03-19+13%3A53%3A11+-0400 9 | # Note: this takes ISO 8601 dates (year-month-day hours:minutes:seconds, followed by the offset from UTC) 10 | notifications_by_date_range_and_user_ids(CGI.unescape(params[:from]).to_time, CGI.unescape(params[:to]).to_time, params[:user_ids].split(',')) 11 | end 12 | -------------------------------------------------------------------------------- /api/notifications_and_subscriptions.rb: -------------------------------------------------------------------------------- 1 | get "#{APIPREFIX}/users/:user_id/notifications" do |user_id| 2 | user.notifications.map(&:to_hash).to_json 3 | end 4 | 5 | get "#{APIPREFIX}/users/:user_id/subscribed_threads" do |user_id| 6 | handle_threads_query( 7 | user.subscribed_threads.where({ "course_id" => params[:course_id] }), 8 | params["user_id"], 9 | params["course_id"], 10 | get_group_ids_from_params(params), 11 | params["author_id"], 12 | params["thread_type"], 13 | value_to_boolean(params["flagged"]), 14 | value_to_boolean(params["unread"]), 15 | value_to_boolean(params["unanswered"]), 16 | value_to_boolean(params["unresponded"]), 17 | value_to_boolean(params["count_flagged"]), 18 | params["sort_key"], 19 | params["page"], 20 | params["per_page"] 21 | ).to_json 22 | end 23 | 24 | post "#{APIPREFIX}/users/:user_id/subscriptions" do |user_id| 25 | user.subscribe(source).to_hash.to_json 26 | end 27 | 28 | delete "#{APIPREFIX}/users/:user_id/subscriptions" do |user_id| 29 | user.unsubscribe(source).to_hash.to_json 30 | end 31 | 32 | get "#{APIPREFIX}/threads/:thread_id/subscriptions" do |thread_id| 33 | page = (params['page'] || DEFAULT_PAGE).to_i 34 | per_page = (params['per_page'] || DEFAULT_PER_PAGE).to_i 35 | 36 | # Build a 
query hash based on the query parameters 37 | query = {} 38 | query[:source_id] = thread_id 39 | query[:source_type] = 'CommentThread' 40 | 41 | subscriptions = Subscription.where(query).paginate(:page => page, :per_page => per_page) 42 | subscriptions_count = subscriptions.total_entries 43 | 44 | content_type :json 45 | 46 | { 47 | collection: subscriptions.map(&:to_hash), 48 | num_pages: [1, (subscriptions_count / per_page.to_f).ceil].max, 49 | page: page, 50 | subscriptions_count: subscriptions_count 51 | }.to_json 52 | end 53 | -------------------------------------------------------------------------------- /api/pins.rb: -------------------------------------------------------------------------------- 1 | put "#{APIPREFIX}/threads/:thread_id/pin" do |thread_id| 2 | pin thread 3 | end 4 | 5 | put "#{APIPREFIX}/threads/:thread_id/unpin" do |thread_id| 6 | unpin thread 7 | end 8 | 9 | -------------------------------------------------------------------------------- /api/search.rb: -------------------------------------------------------------------------------- 1 | def get_thread_ids(context, group_ids, local_params, search_text) 2 | must = [] 3 | filter = [] 4 | must.push({term: {commentable_id: local_params['commentable_id']}}) if local_params['commentable_id'] 5 | must.push({terms: {commentable_id: local_params['commentable_ids'].split(',')}}) if local_params['commentable_ids'] 6 | must.push({term: {course_id: local_params['course_id']}}) if local_params['course_id'] 7 | must.push( 8 | { 9 | multi_match: { 10 | query: search_text, 11 | fields: [:title, :body], 12 | operator: :AND 13 | } 14 | } 15 | ) 16 | group_id = local_params['group_id'] 17 | 18 | if group_id 19 | filter.push( 20 | {:bool => {:must_not => {:exists => {:field => :group_id}}}}, 21 | {:term => {:group_id => group_id}} 22 | ) 23 | end 24 | 25 | filter.push( 26 | {:bool => {:must_not => {:exists => {:field => :context}}}}, 27 | {:term => {:context => context}} 28 | ) 29 | 30 | unless group_ids.empty? 31 | filter.push( 32 | {:bool => {:must_not => {:exists => {:field => :group_id}}}}, 33 | {:terms => {:group_id => group_ids}} 34 | ) 35 | end 36 | 37 | body = { 38 | size: CommentService.config['max_deep_search_comment_count'].to_i, 39 | sort: [ 40 | {updated_at: :desc} 41 | ], 42 | query: { 43 | bool: { 44 | must: must, 45 | should: filter 46 | } 47 | } 48 | } 49 | 50 | response = Elasticsearch::Model.client.search(index: TaskHelpers::ElasticsearchHelper::index_names, body: body) 51 | 52 | thread_ids = Set.new 53 | response['hits']['hits'].each do |hit| 54 | if hit['_index'].include? CommentThread.index_name 55 | thread_ids.add(hit['_id']) 56 | elsif hit['_index'].include? Comment.index_name 57 | thread_ids.add(hit['_source']['comment_thread_id']) 58 | else 59 | # There shouldn't be any other indices. Nevertheless, ignore them, if they are present. 
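# (For reference, each hit has roughly the shape
#   {"_index" => "comments_...", "_id" => "...", "_source" => {"comment_thread_id" => "..."}},
# with illustrative values; this is how comment hits are folded back into their
# parent thread's id above.)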
60 | next 61 | end 62 | end 63 | thread_ids 64 | end 65 | 66 | def get_suggested_text(search_text) 67 | body = { 68 | suggest: { 69 | body_suggestions: { 70 | text: search_text, 71 | phrase: { 72 | field: :body 73 | } 74 | }, 75 | title_suggestions: { 76 | text: search_text, 77 | phrase: { 78 | field: :title 79 | } 80 | } 81 | } 82 | } 83 | 84 | response = Elasticsearch::Model.client.search(index: TaskHelpers::ElasticsearchHelper::index_names, body: body) 85 | body_suggestions = response['suggest'].fetch('body_suggestions', []) 86 | title_suggestions = response['suggest'].fetch('title_suggestions', []) 87 | 88 | [body_suggestions, title_suggestions].each do |suggestion| 89 | if suggestion.length > 0 90 | options = suggestion[0]['options'] 91 | return options[0]['text'] if options.length > 0 92 | end 93 | end 94 | 95 | nil 96 | end 97 | 98 | def get_threads(context, group_ids, local_params, search_text) 99 | # Because threads and comments are currently separate unrelated documents in 100 | # Elasticsearch, we must first query for all matching documents, then 101 | # extract the set of thread ids, and then sort the threads by the specified 102 | # criteria and paginate. For performance reasons, we currently limit the 103 | # number of documents considered (ordered by update recency), which means 104 | # that matching threads can be missed if the search terms are very common. 105 | thread_ids = get_thread_ids(context, group_ids, local_params, search_text) 106 | corrected_text = nil 107 | 108 | if thread_ids.empty? 109 | # Sadly, Elasticsearch does not have a facility for computing suggestions 110 | # with respect to a filter. It would be expensive to determine the best 111 | # suggestion with respect to our filter parameters, so we simply re-query 112 | # with the top suggestion. If that has no results, then we return no results 113 | # and no correction. 114 | corrected_text = get_suggested_text(search_text) 115 | thread_ids = get_thread_ids(context, group_ids, local_params, corrected_text) if corrected_text 116 | corrected_text = nil if thread_ids.empty? 117 | end 118 | 119 | result_obj = handle_threads_query( 120 | CommentThread.in({_id: thread_ids.to_a}), 121 | local_params['user_id'], 122 | local_params['course_id'], 123 | group_ids, 124 | local_params["author_id"], 125 | local_params["thread_type"], 126 | value_to_boolean(local_params['flagged']), 127 | value_to_boolean(local_params['unread']), 128 | value_to_boolean(local_params['unanswered']), 129 | value_to_boolean(local_params['unresponded']), 130 | value_to_boolean(local_params["count_flagged"]), 131 | local_params['sort_key'], 132 | local_params['page'], 133 | local_params['per_page'], 134 | context 135 | ) 136 | 137 | unless result_obj.empty? 138 | result_obj[:corrected_text] = corrected_text 139 | # NOTE this reflects the total results from ES, but does not consider 140 | # any post-filtering that might happen (e.g. unread, flagged...) before 141 | # results are shown to the user. 142 | result_obj[:total_results] = thread_ids.size 143 | end 144 | 145 | result_obj.to_json 146 | end 147 | 148 | error Sinatra::Param::InvalidParameterError do 149 | # NOTE (CCB): The current behavior of the service is to return a seemingly positive response 150 | # for an invalid request. In the future the API's contract should be modified so that HTTP 400 151 | # is returned. This informs the client that the request was invalid, rather than having to guess 152 | # about an empty response body. 
153 | [200, '{}'] 154 | end 155 | 156 | get "#{APIPREFIX}/search/threads" do 157 | param :text, String, required: true 158 | param :context, String, default: 'course' 159 | param :sort_key, String, in: %w(activity comments date votes), transform: :downcase 160 | 161 | local_params = params # Necessary for params to be available inside blocks 162 | group_ids = get_group_ids_from_params(local_params) 163 | get_threads(params[:context], group_ids, local_params, params[:text]) 164 | end 165 | -------------------------------------------------------------------------------- /api/votes.rb: -------------------------------------------------------------------------------- 1 | put "#{APIPREFIX}/comments/:comment_id/votes" do |comment_id| 2 | vote_for comment 3 | end 4 | 5 | delete "#{APIPREFIX}/comments/:comment_id/votes" do |comment_id| 6 | undo_vote_for comment 7 | end 8 | 9 | put "#{APIPREFIX}/threads/:thread_id/votes" do |thread_id| 10 | vote_for thread 11 | end 12 | 13 | delete "#{APIPREFIX}/threads/:thread_id/votes" do |thread_id| 14 | undo_vote_for thread 15 | end 16 | -------------------------------------------------------------------------------- /app.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'bundler' 3 | require 'erb' 4 | 5 | groups = [:default] 6 | 7 | # optionally add newrelic 8 | if ENV["NEW_RELIC_ENABLE"] 9 | groups.push(:newrelic_rpm) 10 | end 11 | 12 | Bundler.setup 13 | Bundler.require(*groups) 14 | 15 | logger = Logger.new(STDOUT) 16 | logger.level = Logger::WARN 17 | begin 18 | extend ::NewRelic::Agent::Instrumentation::ControllerInstrumentation::ClassMethods 19 | rescue NameError 20 | logger.warn "NewRelic agent library not installed" 21 | end 22 | 23 | env_index = ARGV.index("-e") 24 | env_arg = ARGV[env_index + 1] if env_index 25 | environment = env_arg || ENV["SINATRA_ENV"] || "development" 26 | 27 | RACK_ENV = environment 28 | module CommentService 29 | class << self 30 | attr_accessor :config 31 | attr_accessor :blocked_hashes 32 | 33 | def search_enabled? 34 | self.config[:enable_search] 35 | end 36 | end 37 | API_VERSION = 'v1' 38 | API_PREFIX = "/api/#{API_VERSION}" 39 | end 40 | 41 | if ENV["ENABLE_GC_PROFILER"] 42 | GC::Profiler.enable 43 | end 44 | 45 | def get_logger(progname, threshold=nil) 46 | logger = Logger.new(STDERR) 47 | logger.progname = progname 48 | logger.level = threshold || Logger::WARN 49 | logger 50 | end 51 | 52 | application_yaml = ERB.new(File.read("config/application.yml")).result() 53 | CommentService.config = YAML.load(application_yaml).with_indifferent_access 54 | 55 | # Raise sinatra-param exceptions so that we can process, and respond to, them appropriately 56 | set :raise_sinatra_param_exceptions, true 57 | 58 | # Setup Mongo 59 | Mongoid.load!("config/mongoid.yml", environment) 60 | Mongoid.logger.level = Logger::INFO 61 | Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::INFO 62 | 63 | # Setup Elasticsearch 64 | # NOTE (CCB): If you want to see all data sent to Elasticsearch (e.g. for debugging purposes), set the tracer argument 65 | # to the value of a logger. 66 | # Example: Elasticsearch::Client.new(tracer: get_logger('elasticsearch.tracer')) 67 | # NOTE: You can also add a logger, but it will log some FATAL warning during index creation. 
68 | # Example: Elasticsearch::Client.new(logger: get_logger('elasticsearch', Logger::WARN))
69 | Elasticsearch::Model.client = Elasticsearch::Client.new(
70 |   url: CommentService.config[:elasticsearch_server],
71 |   log: false,
72 |   transport_options: CommentService.config[:elasticsearch_transport_options],
73 | )
74 | 
75 | # Setup i18n
76 | I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'locale', '*.yml').to_s]
77 | I18n.default_locale = CommentService.config[:default_locale]
78 | I18n.enforce_available_locales = false
79 | I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
80 | use Rack::Locale
81 | 
82 | helpers do
83 |   def t(*args, **kwargs)
84 |     I18n.t(*args, **kwargs)
85 |   end
86 | end
87 | 
88 | Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each { |file| require file }
89 | Dir[File.dirname(__FILE__) + '/models/*.rb'].each { |file| require file }
90 | Dir[File.dirname(__FILE__) + '/presenters/*.rb'].each { |file| require file }
91 | 
92 | # Comment out observers until notifications are actually set up properly.
93 | #Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file}
94 | #Mongoid.observers = PostReplyObserver, PostTopicObserver, AtUserObserver
95 | #Mongoid.instantiate_observers
96 | 
97 | APIPREFIX = CommentService::API_PREFIX
98 | DEFAULT_PAGE = 1
99 | DEFAULT_PER_PAGE = 20
100 | 
101 | before do
102 |   pass if request.path_info == '/heartbeat'
103 |   api_key = CommentService.config[:api_key]
104 |   error 401 unless params[:api_key] == api_key or env["HTTP_X_EDX_API_KEY"] == api_key
105 | end
106 | 
107 | before do
108 |   content_type "application/json"
109 | end
110 | 
111 | # use yajl implementation for to_json.
112 | # https://github.com/brianmario/yajl-ruby#json-gem-compatibility-api
113 | #
114 | # In addition to performance advantages over the standard JSON gem,
115 | # this avoids a bug with non-BMP characters. For more info see:
116 | # https://github.com/rails/rails/issues/3727
117 | require 'yajl/json_gem'
118 | 
119 | # patch json serialization of ObjectIds to work properly with yajl.
120 | # See https://groups.google.com/forum/#!topic/mongoid/MaXFVw7D_4s
121 | # Note that BSON was moved from Moped::BSON::ObjectId to BSON::ObjectId
122 | module BSON
123 |   class ObjectId
124 |     def as_json(options = {})
125 |       self.to_s
126 |     end
127 |   end
128 | end
129 | 
130 | # Patch json serialization of Time objects
131 | class Time
132 |   # Returns a string representation of this object for use in the JSON
133 |   # response.
134 |   # Note that this was done to prevent milliseconds from showing up in the JSON response, thus breaking
135 |   # API compatibility for downstream clients.
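  # For example, a timestamp of 2015-11-18 03:04:05.678 UTC serializes as
  # "2015-11-18T03:04:05Z" rather than "2015-11-18T03:04:05.678Z".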
136 | def as_json(options = {}) 137 | utc().strftime("%Y-%m-%dT%H:%M:%SZ") 138 | end 139 | end 140 | 141 | 142 | # these files must be required in order 143 | require_relative 'mongoutil' 144 | require './api/search' 145 | require './api/commentables' 146 | require './api/comment_threads' 147 | require './api/comments' 148 | require './api/users' 149 | require './api/votes' 150 | require './api/flags' 151 | require './api/pins' 152 | require './api/notifications_and_subscriptions' 153 | require './api/notifications' 154 | 155 | if RACK_ENV.to_s == "development" 156 | get "#{APIPREFIX}/clean" do 157 | [Delayed::Backend::Mongoid::Job, Comment, CommentThread, User, Notification, Subscription, Activity].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes) 158 | {}.to_json 159 | end 160 | end 161 | 162 | error Mongo::Error::InvalidDocument do 163 | error 400, [t(:requested_object_not_found)].to_json 164 | end 165 | 166 | error Mongoid::Errors::DocumentNotFound do 167 | error 400, [t(:requested_object_not_found)].to_json 168 | end 169 | 170 | error ArgumentError do 171 | error 400, [env['sinatra.error'].message].to_json 172 | end 173 | 174 | CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map { |d| d["hash"] } 175 | 176 | def elasticsearch_health 177 | Elasticsearch::Model.client.cluster.health 178 | end 179 | 180 | def is_elasticsearch_available? 181 | begin 182 | health = elasticsearch_health 183 | return !health['timed_out'] && %w(yellow green).include?(health['status']) 184 | rescue 185 | # ignored 186 | end 187 | 188 | false 189 | end 190 | 191 | begin 192 | newrelic_ignore '/heartbeat' 193 | rescue NameError 194 | logger.warn "NewRelic agent library not installed" 195 | end 196 | 197 | get '/heartbeat' do 198 | reconnect_mongo_primary 199 | error 500, JSON.generate({OK: false, check: :db}) unless is_mongo_available? 200 | error 500, JSON.generate({OK: false, check: :es}) unless is_elasticsearch_available? 201 | JSON.generate({OK: true}) 202 | end 203 | 204 | get '/selftest' do 205 | begin 206 | t1 = Time.now 207 | status = { 208 | db: get_db_is_master, 209 | es: elasticsearch_health, 210 | last_post_created: (Content.last.created_at rescue nil), 211 | total_posts: Content.count, 212 | total_users: User.count, 213 | elapsed_time: Time.now - t1 214 | } 215 | JSON.generate(status) 216 | rescue => ex 217 | [500, 218 | {'Content-Type' => 'text/plain'}, 219 | "#{ex.backtrace.first}: #{ex.message} (#{ex.class})\n\t#{ex.backtrace[1..-1].join("\n\t")}" 220 | ] 221 | end 222 | end 223 | -------------------------------------------------------------------------------- /bin/rake: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # 3 | # This file was generated by Bundler. 4 | # 5 | # The application 'rake' is installed as part of a gem, and 6 | # this file is here to facilitate running it. 7 | # 8 | 9 | require "pathname" 10 | ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile", 11 | Pathname.new(__FILE__).realpath) 12 | 13 | require "rubygems" 14 | require "bundler/setup" 15 | 16 | load Gem.bin_path("rake", "rake") 17 | -------------------------------------------------------------------------------- /bin/rspec: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # 3 | # This file was generated by Bundler. 
4 | #
5 | # The application 'rspec' is installed as part of a gem, and
6 | # this file is here to facilitate running it.
7 | #
8 | 
9 | require "pathname"
10 | ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
11 |   Pathname.new(__FILE__).realpath)
12 | 
13 | require "rubygems"
14 | require "bundler/setup"
15 | 
16 | load Gem.bin_path("rspec-core", "rspec")
17 | 
-------------------------------------------------------------------------------- /bin/unicorn: --------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | #
3 | # This file was generated by Bundler.
4 | #
5 | # The application 'unicorn' is installed as part of a gem, and
6 | # this file is here to facilitate running it.
7 | #
8 | 
9 | require "pathname"
10 | ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
11 |   Pathname.new(__FILE__).realpath)
12 | 
13 | require "rubygems"
14 | require "bundler/setup"
15 | 
16 | load Gem.bin_path("unicorn", "unicorn")
17 | 
-------------------------------------------------------------------------------- /catalog-info.yaml: --------------------------------------------------------------------------------
1 | # This file records information about this repo. Its use is described in OEP-0055:
2 | # https://open-edx-proposals.readthedocs.io/en/latest/processes/oep-0055/decisions/0001-use-backstage-to-support-maintainers.html
3 | 
4 | apiVersion: backstage.io/v1alpha1
5 | kind: Component
6 | metadata:
7 |   name: 'cs_comments_service'
8 |   description: "Forum service backend for Open edX."
9 |   links:
10 |     - url: "https://github.com/openedx/cs_comments_service/"
11 |       title: "cs_comments_service"
12 |       icon: "Web"
13 |   annotations:
14 |     openedx.org/arch-interest-groups: ""
15 |     openedx.org/release: "master"
16 | spec:
17 |   owner: group:edx-infinity
18 |   type: 'service'
19 |   lifecycle: 'production'
20 | 
-------------------------------------------------------------------------------- /config.ru: --------------------------------------------------------------------------------
1 | # It is possible that the rack timeout here is set to a different value than
2 | # the edx-platform django_comment_client timeout. An attempt was made to move
3 | # these two values closer together (5s for django_comment_client, 6s for
4 | # cs_comments_service, down from 20s). This resulted in more reported timeout
5 | # errors on the cs_comments_service side, which better reflected the timeouts
6 | # seen by the django_comment_client. On the downside, the shorter timeout led
7 | # to less time for processing longer queries in the background. The timeout
8 | # has been set back to 20s. Until the slow queries that benefit from being
9 | # cached in the background are resolved, reducing the timeout is not suggested.
10 | # More conversation at https://github.com/openedx/cs_comments_service/pull/146
11 | # -Nov 18th, 2015
12 | 
13 | puts "Loading config.ru."
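# Rack::Timeout (required below) reads its limits from the environment when
# present (e.g. RACK_TIMEOUT_SERVICE_TIMEOUT=20); an equivalent explicit form,
# assuming a recent rack-timeout, would be: use Rack::Timeout, service_timeout: 20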
14 | 15 | require "rack-timeout" 16 | use Rack::Timeout 17 | 18 | require "mongoid" 19 | use Mongoid::QueryCache::Middleware 20 | 21 | require './app' 22 | run Sinatra::Application 23 | 24 | -------------------------------------------------------------------------------- /config/application.yml: -------------------------------------------------------------------------------- 1 | level_limit: 3 2 | api_key: <%= ENV['API_KEY'] || 'PUT_YOUR_API_KEY_HERE' %> 3 | elasticsearch_server: <%= ENV['SEARCH_SERVER_ES7'] || 'http://localhost:9200' %> 4 | max_deep_search_comment_count: 5000 5 | enable_search: true 6 | default_locale: <%= ENV['SERVICE_LANGUAGE'] || 'en-US' %> 7 | manual_pagination_batch_size: <%= ENV['MANUAL_PAGINATION_BATCH_SIZE'] || 500 %> 8 | thread_response_default_size: <%= ENV['THREAD_RESPONSE_DEFAULT_SIZE'] || 100 %> 9 | thread_response_size_limit: <%= ENV['THREAD_RESPONSE_SIZE_LIMIT'] || 200 %> 10 | elasticsearch_index_prefix: <%= ENV['ELASTICSEARCH_INDEX_PREFIX'] || "" %> 11 | <% if ENV['ELASTICSEARCH_CA_PATH'] %> 12 | elasticsearch_transport_options: 13 | ssl: 14 | ca_file: <%= ENV['ELASTICSEARCH_CA_PATH'] %> 15 | <% else %> 16 | elasticsearch_transport_options: {} 17 | <% end %> 18 | -------------------------------------------------------------------------------- /config/benchmark.yml.sample: -------------------------------------------------------------------------------- 1 | seed_size: 2 | commentables: 20 3 | users: 100 4 | threads: 100 5 | top_comments: 100 6 | sub_comments: 200 7 | votes: 1000 8 | tags: 100 9 | query_amount: 10 | course_thread_query: 1000 11 | -------------------------------------------------------------------------------- /config/mongoid.yml: -------------------------------------------------------------------------------- 1 | common: &default_client 2 | options: 3 | write: 4 | w: <%= ENV['MONGOID_WRITE_MODE'] || 1 %> 5 | read: 6 | mode: :<%= ENV['MONGOID_READ_MODE'] || 'primary' %> 7 | max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %> 8 | retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %> 9 | connect_timeout: <%= ENV['MONGOID_CONNECT_TIMEOUT'] || 0.5 %> 10 | ssl: <%= ENV['MONGOID_USE_SSL'] || false %> 11 | auth_source: <%= ENV['MONGOID_AUTH_SOURCE'] || '' %> 12 | auth_mech: <%= ENV['MONGOID_AUTH_MECH'].nil? ? 
':scram' : ENV['MONGOID_AUTH_MECH'] %> 13 | 14 | common_uri: &default_uri 15 | uri: <%= ENV['MONGOHQ_URL'] %> 16 | 17 | development: 18 | clients: 19 | default: 20 | <<: *default_uri 21 | <<: *default_client 22 | 23 | test: 24 | clients: 25 | default: 26 | <<: *default_uri 27 | <<: *default_client 28 | 29 | production: 30 | clients: 31 | default: 32 | <<: *default_uri 33 | <<: *default_client 34 | 35 | edgeprod: 36 | clients: 37 | default: 38 | <<: *default_uri 39 | <<: *default_client 40 | 41 | edgestage: 42 | clients: 43 | default: 44 | <<: *default_uri 45 | <<: *default_client 46 | 47 | staging: 48 | clients: 49 | default: 50 | <<: *default_uri 51 | <<: *default_client 52 | 53 | loadtest: 54 | clients: 55 | default: 56 | <<: *default_uri 57 | <<: *default_client 58 | 59 | defaults: &defaults 60 | use_utc: false 61 | use_activesupport_time_zone: true 62 | -------------------------------------------------------------------------------- /config/unicorn.heroku.rb: -------------------------------------------------------------------------------- 1 | worker_processes 4 2 | timeout 25 3 | preload_app true 4 | 5 | before_fork do |server, worker| 6 | Signal.trap 'TERM' do 7 | puts 'Unicorn master intercepting TERM and sending myself QUIT instead' 8 | Process.kill 'QUIT', Process.pid 9 | end 10 | end 11 | 12 | after_fork do |server, worker| 13 | Signal.trap 'TERM' do 14 | puts 'Unicorn worker intercepting TERM and doing nothing. Waiting for master to send QUIT' 15 | end 16 | ::Mongoid.default_client.close 17 | end 18 | -------------------------------------------------------------------------------- /config/unicorn.rb: -------------------------------------------------------------------------------- 1 | require 'tmpdir' 2 | 3 | # Load app.rb to get all dependencies. 4 | require File.expand_path('../../app.rb', __FILE__) 5 | 6 | # Make sure elasticsearch is configured correctly 7 | UnicornHelpers.exit_on_invalid_index 8 | 9 | worker_processes Integer(ENV['WORKER_PROCESSES'] || 4) 10 | timeout 25 11 | preload_app true 12 | data_dir = ENV['DATA_DIR'] || Dir.tmpdir 13 | listen "unix:#{data_dir}/forum.sock", :backlog => 512 14 | pid "#{data_dir}/forum_unicorn.pid" 15 | 16 | after_fork do |server, worker| 17 | ::Mongoid.default_client.close 18 | end 19 | -------------------------------------------------------------------------------- /config/unicorn_tcp.rb: -------------------------------------------------------------------------------- 1 | require 'tmpdir' 2 | 3 | # Load app.rb to get all dependencies. 
4 | require File.expand_path('../../app.rb', __FILE__) 5 | 6 | # Make sure elasticsearch is configured correctly 7 | UnicornHelpers.exit_on_invalid_index 8 | 9 | worker_processes Integer(ENV['WORKER_PROCESSES'] || 4) 10 | timeout 25 11 | preload_app true 12 | 13 | service_name = 'forum' 14 | if ENV['ENABLE_DATA_DOG'] 15 | require 'ddtrace' 16 | # Add Datadog APM configuration 17 | Datadog.configure do |c| 18 | c.tracing.instrument :rails, service_name: service_name 19 | c.tracing.instrument :sinatra, service_name: service_name 20 | end 21 | end 22 | 23 | listen_host = ENV['LISTEN_HOST'] || '0.0.0.0' 24 | listen_port = ENV['LISTEN_PORT'] || '4567' 25 | listen "#{listen_host}:#{listen_port}", :tcp_nopush => true, :backlog => 512 26 | 27 | data_dir = ENV['DATA_DIR'] || Dir.tmpdir 28 | pid "#{data_dir}/forum_unicorn.pid" 29 | 30 | after_fork do |server, worker| 31 | ::Mongoid.default_client.close 32 | ::Mongoid.default_client.reconnect 33 | end 34 | 35 | before_fork do |server, worker| 36 | ::Mongoid.disconnect_clients 37 | end 38 | -------------------------------------------------------------------------------- /lib/tasks/benchmark.rake: -------------------------------------------------------------------------------- 1 | require 'rest_client' 2 | 3 | PREFIX = "http://localhost:4567/api/v1" 4 | 5 | namespace :benchmark do 6 | task :bulk_generate => :environment do 7 | 8 | seed_size_config = YAML.load_file("config/benchmark.yml").with_indifferent_access[:seed_size] 9 | 10 | COMMENTABLES = seed_size_config[:commentables] 11 | USERS = seed_size_config[:users] 12 | THREADS = seed_size_config[:threads] 13 | TOP_COMMENTS = seed_size_config[:top_comments] 14 | SUB_COMMENTS = seed_size_config[:sub_comments] 15 | VOTES = seed_size_config[:votes] 16 | TAGS = seed_size_config[:tags] 17 | 18 | Benchmark.bm(31) do |x| 19 | 20 | RestClient.get "#{PREFIX}/clean" 21 | 22 | x.report "create users" do 23 | (1..USERS).each do |user_id| 24 | data = { id: user_id, username: "user#{user_id}" } 25 | RestClient.post "#{PREFIX}/users", data 26 | end 27 | end 28 | 29 | x.report "create new threads" do 30 | (1..THREADS).each do |t| 31 | data = {title: Faker::Lorem.sentence(word_count: 6) + " token#{rand(10)} token#{rand(10)}", body: Faker::Lorem.paragraphs.join("\n\n") + " token#{rand(10)} token#{rand(10)}", anonymous: false, \ 32 | course_id: "1", user_id: (rand(USERS) + 1).to_s, \ 33 | tags: (1..5).map{|x| "tag#{rand(TAGS)}"}.join(",")} 34 | 35 | RestClient.post "#{PREFIX}/question_#{rand(COMMENTABLES).to_s}/threads", data 36 | 37 | end 38 | end 39 | 40 | comment_thread_ids = CommentThread.all.to_a.map(&:id) 41 | 42 | x.report("create top comments") do 43 | TOP_COMMENTS.times do 44 | data = {body: Faker::Lorem.paragraphs.join("\n\n") + " token#{rand(10)} token#{rand(10)}", anonymous: false, 45 | course_id: "1", user_id: (rand(USERS) + 1).to_s} 46 | RestClient.post "#{PREFIX}/threads/#{comment_thread_ids.sample}/comments", data 47 | 48 | end 49 | end 50 | 51 | top_comment_ids = Comment.all.to_a.map(&:id) 52 | 53 | x.report("create sub comments") do 54 | SUB_COMMENTS.times do 55 | data = {body: Faker::Lorem.paragraphs.join("\n\n") + " token#{rand(10)} token#{rand(10)}", anonymous: false, 56 | course_id: "1", user_id: (rand(USERS) + 1).to_s} 57 | RestClient.post "#{PREFIX}/comments/#{top_comment_ids.sample}", data 58 | 59 | end 60 | end 61 | 62 | x.report("create votes") do 63 | VOTES.times do 64 | data = {user_id: (rand(USERS) + 1).to_s, value: [:up, :down].sample} 65 | RestClient.put 
"#{PREFIX}/threads/#{comment_thread_ids.sample}/votes", data 66 | RestClient.put "#{PREFIX}/comments/#{top_comment_ids.sample}/votes", data 67 | end 68 | end 69 | end 70 | end 71 | task :bulk_query => :environment do 72 | 73 | query_amount_config = YAML.load_file("config/benchmark.yml").with_indifferent_access[:query_amount] 74 | 75 | COURSE_THREAD_QUERY = query_amount_config[:course_thread_query] 76 | 77 | Benchmark.bm(31) do |x| 78 | sort_keys = %w[date activity votes comments] 79 | 80 | x.report("querying threads in a course") do 81 | 82 | (1..COURSE_THREAD_QUERY).each do |seed| 83 | query_params = { course_id: "1", sort_key: sort_keys[seed % 4], page: seed % 5 + 1, per_page: 5 } 84 | RestClient.get "#{PREFIX}/threads", params: query_params 85 | end 86 | end 87 | x.report("searching threads in a course") do 88 | 89 | (1..COURSE_THREAD_QUERY).each do |seed| 90 | query_params = { course_id: "1", text: "token#{seed % 10} token#{(seed * seed) % 10}", sort_key: sort_keys[seed % 4], page: seed % 5 + 1, per_page: 5 } 91 | RestClient.get "#{PREFIX}/search/threads", params: query_params 92 | end 93 | end 94 | end 95 | end 96 | end 97 | -------------------------------------------------------------------------------- /lib/tasks/db.rake: -------------------------------------------------------------------------------- 1 | require 'factory_bot' 2 | 3 | namespace :db do 4 | FactoryBot.find_definitions 5 | 6 | def create_test_user(id) 7 | User.create!(external_id: id, username: "user#{id}") 8 | end 9 | 10 | task :init => :environment do 11 | puts 'recreating indexes...' 12 | [Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:remove_indexes).each(&:create_indexes) 13 | puts 'finished' 14 | end 15 | 16 | task :clean => :environment do 17 | Comment.delete_all 18 | CommentThread.delete_all 19 | User.delete_all 20 | Notification.delete_all 21 | Subscription.delete_all 22 | end 23 | 24 | THREADS_PER_COMMENTABLE = 20 25 | TOP_COMMENTS_PER_THREAD = 3 26 | ADDITIONAL_COMMENTS_PER_THREAD = 5 27 | 28 | COURSE_ID = 'MITx/6.002x/2012_Fall' 29 | 30 | def generate_comments_for(commentable_id, num_threads=THREADS_PER_COMMENTABLE, num_top_comments=TOP_COMMENTS_PER_THREAD, num_subcomments=ADDITIONAL_COMMENTS_PER_THREAD) 31 | level_limit = CommentService.config['level_limit'] 32 | 33 | 34 | users = User.all.to_a 35 | 36 | puts "Generating threads and comments for #{commentable_id}..." 
37 | 
38 |     threads = []
39 |     top_comments = []
40 |     additional_comments = []
41 | 
42 |     num_threads.times do
43 |       inner_top_comments = []
44 | 
45 |       # Create a new thread
46 |       comment_thread = FactoryBot::create(:comment_thread, commentable_id: commentable_id, author: users.sample, course_id: COURSE_ID)
47 |       threads << comment_thread
48 | 
49 |       # Subscribe a few users to the thread
50 |       users.sample(3).each { |user| user.subscribe(comment_thread) }
51 | 
52 |       # Create a few top-level comments for the thread
53 |       (1 + rand(num_top_comments)).times do
54 |         endorsed = [true, false].sample
55 |         comment = FactoryBot::create(:comment, author: users.sample, comment_thread: comment_thread, endorsed: endorsed, course_id: COURSE_ID)
56 |         top_comments << comment
57 |         inner_top_comments << comment
58 |       end
59 | 
60 |       # Create additional nested comments
61 |       parent_comments = inner_top_comments
62 |       (level_limit-1).times do
63 |         current_level_comments = []
64 |         (1 + rand(num_subcomments)).times do
65 |           parent = parent_comments.sample
66 |           endorsed = [true, false].sample
67 |           child = FactoryBot::create(:comment, author: users.sample, parent: parent, endorsed: endorsed)
68 |           current_level_comments << child
69 |         end
70 |         parent_comments = current_level_comments
71 |       end
72 |     end
73 | 
74 |     puts 'voting'
75 | 
76 |     (threads + top_comments + additional_comments).each do |c|
77 |       users.each do |user|
78 |         user.vote(c, [:up, :down].sample)
79 |       end
80 |     end
81 |     puts 'finished'
82 |   end
83 | 
84 | 
85 |   task :generate_comments, [:commentable_id, :num_threads, :num_top_comments, :num_subcomments] => :environment do |t, args|
86 |     args.with_defaults(num_threads: THREADS_PER_COMMENTABLE,
87 |                        num_top_comments: TOP_COMMENTS_PER_THREAD,
88 |                        num_subcomments: ADDITIONAL_COMMENTS_PER_THREAD)
89 |     generate_comments_for(args[:commentable_id], args[:num_threads], args[:num_top_comments], args[:num_subcomments])
90 | 
91 |   end
92 | 
93 |   task :seed => [:environment, :clean] do
94 |     beginning_time = Time.now
95 | 
96 |     (1..10).map { |id| create_test_user(id) }
97 |     generate_comments_for('video_1')
98 |     generate_comments_for('lab_1')
99 |     generate_comments_for('lab_2')
100 | 
101 |     end_time = Time.now
102 | 
103 |     puts "Number of comments generated: #{Comment.count}"
104 |     puts "Number of comment threads generated: #{CommentThread.count}"
105 | 
106 |     puts "Time elapsed #{(end_time - beginning_time)*1000} milliseconds"
107 | 
108 |   end
109 | 
110 |   task :add_anonymous_to_peers => :environment do
111 |     Content.collection.find(:anonymous_to_peers => nil).update_all({'$set' => {anonymous_to_peers: false}})
112 |   end
113 | 
114 | end
115 | 
-------------------------------------------------------------------------------- /lib/tasks/deep_search.rake: --------------------------------------------------------------------------------
1 | require 'rest_client'
2 | roots = {}
3 | roots['development'] = "http://localhost:8000"
4 | roots['test'] = "http://localhost:8000"
5 | roots['production'] = "http://edx.org"
6 | roots['staging'] = "http://stage.edx.org"
7 | ROOT = roots[ENV['SINATRA_ENV']]
8 | 
9 | namespace :deep_search do
10 | 
11 |   task :performance => :environment do
12 |     #USAGE
13 |     #SINATRA_ENV=development rake deep_search:performance
14 |     #or
15 |     #SINATRA_ENV=development bundle exec rake deep_search:performance
16 | 
17 |     #create comment and thread bodies
18 |     bodies = []
19 | 
20 |     50.times do |i|
21 |       bodies << (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
22 |     end
23 | 
24 |     parents = CommentThread.limit(100)
25 |     #now create comments and threads with hits
26 | 
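    # (Each of the two loops below runs 100 times and creates 1-5 documents
    # per iteration, so expect roughly 300 threads and 300 comments per run.)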
27 | puts "Manufacturing Threads" 28 | 100.times do |j| 29 | (1..5).to_a.sample.times do |i| 30 | c = CommentThread.new 31 | c.course_id = 'sample course' 32 | c.title = 'sample title' 33 | c.commentable_id = 'sample commetable' 34 | c.body = bodies.sample 35 | c.author = 1 36 | c.save 37 | end 38 | end 39 | 40 | puts "Manufacturing Comments" 41 | 100.times do |j| 42 | (1..5).to_a.sample.times do |i| 43 | c = Comment.new 44 | c.course_id = 'sample course' 45 | c.body = bodies.sample 46 | c.comment_thread_id = parents.sample.id 47 | c.author = 1 48 | c.save 49 | end 50 | end 51 | 52 | sort_keys = %w[date activity votes comments] 53 | 54 | #set the sinatra env to test to avoid 401'ing 55 | set :environment, :test 56 | 57 | start_time = Time.now 58 | puts "Starting test at #{start_time}" 59 | 1000.times do |i| 60 | query_params = { course_id: "1", sort_key: sort_keys.sample, page: 1, per_page: 5, text: bodies.sample } 61 | RestClient.get "#{PREFIX}/threads", params: query_params 62 | end 63 | end_time = Time.now 64 | puts "Ending test at #{end_time}" 65 | puts "Total Time: #{(end_time - start_time).to_f} seconds" 66 | 67 | end 68 | 69 | end 70 | -------------------------------------------------------------------------------- /lib/tasks/flags.rake: -------------------------------------------------------------------------------- 1 | require 'rest_client' 2 | roots = {} 3 | roots['development'] = "http://localhost:8000" 4 | roots['test'] = "http://localhost:8000" 5 | roots['production'] = "http://edx.org" 6 | ROOT = roots[ENV['SINATRA_ENV']] 7 | 8 | namespace :flags do 9 | 10 | 11 | #USAGE 12 | #SINATRA_ENV=development rake flags:flagged 13 | 14 | task :flagged => :environment do 15 | flagged = Content.flagged 16 | 17 | courses = {} 18 | 19 | flagged.each do |f| 20 | 21 | if not courses[f.course_id] 22 | courses[f.course_id] = [] 23 | end 24 | 25 | courses[f.course_id] << f 26 | end 27 | 28 | courses.each do |k,v| 29 | puts "#{k.upcase}" 30 | puts "****************" 31 | v.each do |f| 32 | puts "#{ROOT}/courses/#{f.course_id}/discussion/forum/#{f.commentable_id}/threads/#{f.comment_thread_id} (#{f.class})" 33 | end 34 | puts "\n\n\n\n" 35 | end 36 | 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/tasks/i18n.rake: -------------------------------------------------------------------------------- 1 | namespace :i18n do 2 | desc "Push source strings to Transifex for translation" 3 | task :push do 4 | sh("tx push -s") 5 | end 6 | 7 | desc "Pull translated strings from Transifex" 8 | task :pull do 9 | sh("tx pull --mode=reviewed --all --minimum-perc=1") 10 | end 11 | 12 | desc "Clean the locale directory" 13 | task :clean do 14 | sh("git clean -f locale/") 15 | end 16 | 17 | desc "Commit translated strings to the repository" 18 | task :commit => ["i18n:clean", "i18n:pull"] do 19 | sh("git add locale") 20 | sh("git commit -m 'Updated translations (autogenerated message)'") 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/tasks/jobs.rake: -------------------------------------------------------------------------------- 1 | namespace :jobs do 2 | desc "Clear the delayed_job queue." 3 | task :clear => :environment do 4 | Delayed::Job.delete_all 5 | end 6 | 7 | desc "Start a delayed_job worker." 
8 | task :work => :environment do 9 | Delayed::Worker.new(:min_priority => ENV['MIN_PRIORITY'], :max_priority => ENV['MAX_PRIORITY'], :queues => (ENV['QUEUES'] || ENV['QUEUE'] || '').split(','), :quiet => false).start 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/tasks/kpis.rake: -------------------------------------------------------------------------------- 1 | require 'rest_client' 2 | roots = {} 3 | roots['development'] = "http://localhost:8000" 4 | roots['test'] = "http://localhost:8000" 5 | roots['production'] = "http://edx.org" 6 | roots['staging'] = "http://stage.edx.org" 7 | ROOT = roots[ENV['SINATRA_ENV']] 8 | 9 | namespace :kpis do 10 | 11 | task :prolific => :environment do 12 | #USAGE 13 | #SINATRA_ENV=development rake kpis:prolific 14 | #or 15 | #SINATRA_ENV=development bundle exec rake kpis:prolific 16 | 17 | courses = Content.all.distinct("course_id") 18 | puts "\n\n*********************************************************************" 19 | puts " Users who have created the most forum content on edX (#{Date.today}) " 20 | puts "*********************************************************************\n\n" 21 | 22 | courses.each do |c| 23 | contributors = Content.prolific_metric({"course_id" => c}, 10) 24 | #now output 25 | puts c 26 | puts "*********************" 27 | contributors.each do |p| 28 | url = ROOT + "/courses/#{c}/discussion/forum/users/#{p['_id']}" 29 | count_string = "#{p['value'].to_i} contributions:".rjust(25) 30 | puts "#{count_string} #{url} " 31 | end 32 | puts "\n" 33 | 34 | end 35 | end 36 | 37 | 38 | task :starters => :environment do 39 | #USAGE 40 | #SINATRA_ENV=development rake kpis:starters 41 | #or 42 | #SINATRA_ENV=development bundle exec rake kpis:starters 43 | 44 | courses = Content.all.distinct("course_id") 45 | puts "\n\n*********************************************************************" 46 | puts " Users who have started the most threads on edX (#{Date.today}) " 47 | puts "*********************************************************************\n\n" 48 | 49 | courses.each do |c| 50 | contributors = Content.prolific_metric({"course_id" => c, "_type" => "CommentThread"}, 10) 51 | #now output 52 | puts c 53 | puts "*********************" 54 | contributors.each do |p| 55 | url = ROOT + "/courses/#{c}/discussion/forum/users/#{p['_id']}" 56 | count_string = "#{p['value'].to_i} contributions:".rjust(25) 57 | puts "#{count_string} #{url} " 58 | end 59 | puts "\n" 60 | 61 | end 62 | end 63 | 64 | task :ppu => :environment do 65 | #USAGE 66 | #SINATRA_ENV=development rake kpis:ppu 67 | #or 68 | #SINATRA_ENV=development bundle exec rake kpis:ppu 69 | 70 | courses = Content.all.distinct("course_id") 71 | puts "\n\n*********************************************************************" 72 | puts "Average threads per contributing user per course on edX (#{Date.today}) " 73 | puts "*********************************************************************\n\n" 74 | 75 | courses.each do |c| 76 | #first, get all the users who have contributed 77 | contributors = Content.prolific_metric({"course_id" => c}, 10) 78 | total_users = contributors.count 79 | 80 | #now, get the threads 81 | 82 | total_threads = Content.where("_type" => "CommentThread","course_id" => c).count 83 | 84 | ratio = total_threads.to_f / total_users.to_f 85 | 86 | #now output 87 | puts c 88 | puts "*********************" 89 | puts "Total Threads: #{total_threads}" 90 | puts "Total Users: #{total_users}" 91 | puts "Average Thread/User: 
#{ratio}" 92 | puts "\n" 93 | 94 | end 95 | end 96 | 97 | task :epu => :environment do 98 | #USAGE 99 | #SINATRA_ENV=development rake kpis:epu 100 | #or 101 | #SINATRA_ENV=development bundle exec rake kpis:epu 102 | 103 | courses = Content.all.distinct("course_id") 104 | puts "\n\n*****************************************************************************************************************" 105 | puts "Average contributions (votes, threads, or comments) per contributing user per course on edX (#{Date.today}) " 106 | puts "*********************************************************************************************************************\n\n" 107 | 108 | courses.each do |c| 109 | #first, get all the users who have contributed 110 | summary = Content.summary({"course_id" => c}) 111 | total_users = summary["contributor_count"] 112 | total_activity = summary['thread_count'] 113 | total_activity += summary['comment_count'] 114 | total_activity += summary['vote_count'] 115 | ratio = total_activity.to_f / total_users.to_f 116 | 117 | 118 | puts c 119 | puts "*********************" 120 | puts "Total Threads: #{summary['thread_count']}" 121 | puts "Total Comments: #{summary['comment_count']}" 122 | puts "Total Votes: #{summary['vote_count']}\n\n" 123 | puts "Total Users: #{summary['contributor_count']}" 124 | puts "Total Engagements: #{total_activity}\n\n" 125 | puts "Average Engagement Per Engaging User: #{ratio}\n\n\n " 126 | 127 | end 128 | end 129 | 130 | task :orphans => :environment do 131 | #USAGE 132 | #SINATRA_ENV=development rake kpis:orphans 133 | #or 134 | #SINATRA_ENV=development bundle exec rake kpis:orphans 135 | 136 | courses = Content.all.distinct("course_id") 137 | puts "\n\n****************************************************" 138 | puts "thread reply rate per course on edX (#{Date.today}) " 139 | puts "****************************************************\n\n" 140 | 141 | courses.each do |c| 142 | #first, get all the users who have contributed 143 | threads = Content.where({"course_id" => c, "_type" => "CommentThread"}) 144 | orphans = Content.where({"course_id" => c, "_type" => "CommentThread", "comment_count" => 0}) 145 | 146 | ratio = orphans.count.to_f / threads.count.to_f 147 | 148 | puts c 149 | puts "*********************" 150 | puts "Total Threads: #{threads.count}" 151 | puts "Total Orphaned Threads: #{orphans.count}" 152 | if threads.count > 0 153 | puts "Orphan Ratio: #{(ratio*1000).round.to_f/10.0}%" 154 | end 155 | puts "\n\n\n" 156 | end 157 | end 158 | end 159 | -------------------------------------------------------------------------------- /lib/tasks/search.rake: -------------------------------------------------------------------------------- 1 | require_relative '../task_helpers' 2 | 3 | namespace :search do 4 | desc 'Indexes content updated in the last N minutes.' 5 | task :catchup, [:comments_index_name, :comment_threads_index_name, :minutes, :batch_size] => :environment do |t, args| 6 | start_time = Time.now - (args[:minutes].to_i * 60) 7 | args.with_defaults(:batch_size => 500) 8 | indices = [args[:comments_index_name].to_s, args[:comment_threads_index_name].to_s] 9 | TaskHelpers::ElasticsearchHelper.catchup_indices(indices, start_time, args[:batch_size].to_i) 10 | end 11 | 12 | desc 'Rebuilds new indices of all data from the database.' 
13 |   task :rebuild_indices, [:batch_size, :extra_catchup_minutes] => :environment do |t, args|
14 |     args.with_defaults(:batch_size => 500)
15 |     args.with_defaults(:extra_catchup_minutes => 5) # additional catchup time in minutes
16 | 
17 |     TaskHelpers::ElasticsearchHelper.rebuild_indices(
18 |       args[:batch_size].to_i,
19 |       args[:extra_catchup_minutes].to_i
20 |     )
21 |   end
22 | 
23 |   desc 'Creates new search indices.'
24 |   task :initialize, [:force_new_index] => :environment do |t, args|
25 |     # When force_new_index is true, a fresh index for the "content" alias is created even if the
26 |     # "content" alias already exists.
27 |     args.with_defaults(:force_new_index => false)
28 |     # WARNING: if "content" is an index and not an alias, it will be deleted and recreated
29 |     # no matter what is supplied for the force argument
30 |     TaskHelpers::ElasticsearchHelper.initialize_indices(args[:force_new_index])
31 |   end
32 | 
33 |   desc 'Validates that the "content" alias exists with expected field mappings and types.'
34 |   task :validate_indices => :environment do
35 |     TaskHelpers::ElasticsearchHelper.validate_indices
36 |   end
37 | 
38 | end
39 | 
-------------------------------------------------------------------------------- /lib/tasks/user_stats.rake: --------------------------------------------------------------------------------
1 | require 'logger'
2 | namespace :user_stats do
3 | 
4 |   logger = Logger.new(STDOUT)
5 | 
6 |   desc 'Updates discussion stats for users in a course'
7 |   task :update_stats, [:course_id] => :environment do |t, args|
8 |     if args[:course_id]
9 |       updated_users = update_all_users_in_course args[:course_id]
10 |       logger.info "Updated stats for #{updated_users.length} users"
11 |     else
12 |       abort "Course id is required"
13 |     end
14 |   end
15 | 
16 | end
17 | 
-------------------------------------------------------------------------------- /lib/unicorn_helpers.rb: --------------------------------------------------------------------------------
1 | module UnicornHelpers
2 | 
3 |   # Make sure elasticsearch is configured correctly
4 |   def self.exit_on_invalid_index
5 |     begin
6 |       TaskHelpers::ElasticsearchHelper.validate_indices
7 |     rescue => e
8 |       # Magic exit code expected by forum-supervisor.sh for when
9 |       # rake search:validate_indices fails
10 |       STDERR.puts "ERROR: ElasticSearch configuration validation failed. 
"\ 11 | "\"rake search:validate_indices\" failed with the following message: #{e.message}" 12 | exit(101) 13 | end 14 | end 15 | 16 | end 17 | -------------------------------------------------------------------------------- /lib/utils.rb: -------------------------------------------------------------------------------- 1 | class NilClass 2 | def to_hash 3 | {} 4 | end 5 | end 6 | -------------------------------------------------------------------------------- /locale/en-US.yml: -------------------------------------------------------------------------------- 1 | en-US: 2 | requested_object_not_found: "requested object not found" 3 | user_id_is_required: "User id is required" 4 | source_type_must_be_user_thread_or_other: "Source type must be 'user', 'thread' or 'other'" 5 | value_is_required: "Value is required" 6 | value_is_invalid: "Value is invalid" 7 | anonymous: "anonymous" 8 | blocked_content_with_body_hash: "blocked content with body hash %{hash}" 9 | param_must_be_a_non_negative_number: "%{param} must be a non-negative number" 10 | param_must_be_a_number_greater_than_zero: "%{param} must be a number greater than zero" 11 | param_exceeds_limit: "%{param} exceeds limit: %{limit}" 12 | cannot_specify_group_id_and_group_ids: "Cannot specify both group_id and group_ids as filters." 13 | -------------------------------------------------------------------------------- /locale/x-test.yml: -------------------------------------------------------------------------------- 1 | x-test: 2 | requested_object_not_found: "##x-test## requested object not found" 3 | user_id_is_required: "##x-test## User id is required" 4 | source_type_must_be_user_thread_or_other: "##x-test## Source type must be 'user', 'thread' or 'other'" 5 | value_is_required: "##x-test## Value is required" 6 | value_is_invalid: "##x-test## Value is invalid" 7 | anonymous: "##x-test## anonymous" 8 | blocked_content_with_body_hash: "##x-test## blocked content with body hash %{hash}" 9 | param_must_be_a_non_negative_number: "##x-test## %{param} must be a non-negative number" 10 | param_must_be_a_number_greater_than_zero: "##x-test## %{param} must be a number greater than zero" 11 | param_exceeds_limit: "##x-test## %{param} exceeds limit: %{limit}" 12 | cannot_specify_group_id_and_group_ids: "##x-test## Cannot specify both group_id and group_ids as filters." 
13 | 
-------------------------------------------------------------------------------- /models/activity.rb: --------------------------------------------------------------------------------
1 | class Activity
2 |   include Mongoid::Document
3 |   include Mongoid::Timestamps
4 | 
5 |   field :anonymous, type: Boolean
6 |   field :activity_type, type: String
7 |   field :happend_at, type: Time # NOTE: "happend_at" (sic) is the persisted field name; renaming it would require a data migration
8 | 
9 |   belongs_to :actor, class_name: "User", inverse_of: :activities, index: true, autosave: true
10 |   belongs_to :target, inverse_of: :activities, polymorphic: true, index: true, autosave: true
11 | 
12 |   validates_presence_of :actor
13 |   #validates_presence_of :target
14 | 
15 | 
16 | end
17 | 
-------------------------------------------------------------------------------- /models/comment.rb: --------------------------------------------------------------------------------
1 | require 'logger'
2 | require_relative 'concerns/searchable'
3 | require_relative 'content'
4 | require_relative 'constants'
5 | require_relative 'edit_history'
6 | 
7 | logger = Logger.new(STDOUT)
8 | logger.level = Logger::WARN
9 | 
10 | class Comment < Content
11 |   include Mongoid::Tree
12 |   include Mongoid::Timestamps
13 |   include Mongoid::MagicCounterCache
14 |   include ActiveModel::MassAssignmentSecurity
15 |   include Elasticsearch::Model
16 |   include Searchable
17 | 
18 |   voteable self, :up => +1, :down => -1
19 | 
20 |   field :course_id, type: String
21 |   field :body, type: String
22 |   field :endorsed, type: Boolean, default: false
23 |   field :endorsement, type: Hash
24 |   field :anonymous, type: Boolean, default: false
25 |   field :anonymous_to_peers, type: Boolean, default: false
26 |   field :commentable_id, type: String
27 |   field :at_position_list, type: Array, default: []
28 |   field :sk, type: String, default: nil
29 |   field :child_count, type: Integer
30 |   field :retired_username, type: String, default: nil
31 | 
32 |   index({author_id: 1, course_id: 1})
33 |   index({_type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1})
34 |   index({comment_thread_id: 1, author_id: 1, created_at: 1})
35 | 
36 |   index_name do
37 |     prefix = ::CommentService.config[:elasticsearch_index_prefix]
38 |     "#{prefix}comments"
39 |   end
40 | 
41 |   mapping dynamic: 'false' do
42 |     indexes :body, type: :text, store: true, term_vector: :with_positions_offsets
43 |     indexes :course_id, type: :keyword
44 |     indexes :comment_thread_id, type: :keyword
45 |     indexes :commentable_id, type: :keyword
46 |     indexes :group_id, type: :keyword
47 |     indexes :context, type: :keyword
48 |     indexes :created_at, type: :date
49 |     indexes :updated_at, type: :date
50 |     # NOTE: this field is needed only for testing
51 |     indexes :title, type: :keyword
52 |   end
53 | 
54 |   def as_indexed_json(options={})
55 |     as_json(except: [:id, :_id])
56 |   end
57 | 
58 |   belongs_to :comment_thread, index: true
59 |   belongs_to :author, class_name: 'User', inverse_of: :comments, index: true
60 |   embeds_many :edit_history, cascade_callbacks: true
61 | 
62 |   attr_accessible :body, :course_id, :anonymous, :anonymous_to_peers, :endorsed, :endorsement, :retired_username
63 | 
64 |   validates_presence_of :comment_thread, autosave: false
65 |   validates_presence_of :body
66 |   validates_presence_of :course_id
67 |   validates_presence_of :author, autosave: false
68 | 
69 |   counter_cache :comment_thread
70 | 
71 |   before_destroy :destroy_children
72 |   before_create :set_thread_last_activity_at
73 |   before_save :set_sk
74 |   before_save do
75 |     unless anonymous or anonymous_to_peers or not body_changed?
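      # i.e. only a non-anonymous save that actually changes the body counts
      # as author activity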
76 | author.update_activity_timestamp course_id 77 | end 78 | end 79 | after_destroy do 80 | unless anonymous or anonymous_to_peers 81 | if parent_id.nil? 82 | author.update_stats_for_course(course_id, responses: -1) 83 | else 84 | author.update_stats_for_course(course_id, replies: -1) 85 | end 86 | end 87 | end 88 | after_create do 89 | # Don't count anonymous posts 90 | unless anonymous or anonymous_to_peers 91 | if parent_id.nil? 92 | author.update_stats_for_course(course_id, responses: 1) 93 | else 94 | author.update_stats_for_course(course_id, replies: 1) 95 | end 96 | end 97 | end 98 | 99 | def self.hash_tree(nodes) 100 | nodes.map { |node, sub_nodes| node.to_hash.merge('children' => hash_tree(sub_nodes).compact) } 101 | end 102 | 103 | # This should really go somewhere else, but sticking it here for now. This is 104 | # used to flatten out the subtree fetched by calling self.subtree. This is 105 | # equivalent to calling descendants_and_self; however, calling 106 | # descendants_and_self and subtree both is very inefficient. It's cheaper to 107 | # just flatten out the subtree, and simpler than duplicating the code that 108 | # actually creates the subtree. 109 | def self.flatten_subtree(x) 110 | if x.is_a? Array 111 | x.flatten.map { |y| self.flatten_subtree(y) } 112 | elsif x.is_a? Hash 113 | x.to_a.map { |y| self.flatten_subtree(y) }.flatten 114 | else 115 | x 116 | end 117 | end 118 | 119 | def to_hash(params={}) 120 | sort_by_parent_and_time = Proc.new do |x, y| 121 | arr_cmp = x.parent_ids.map(&:to_s) <=> y.parent_ids.map(&:to_s) 122 | if arr_cmp != 0 123 | arr_cmp 124 | else 125 | x.created_at <=> y.created_at 126 | end 127 | end 128 | if params[:recursive] 129 | # TODO: remove and reuse the new hierarchical sort keys if possible 130 | subtree_hash = subtree(sort: sort_by_parent_and_time) 131 | self.class.hash_tree(subtree_hash).first 132 | else 133 | as_document 134 | .slice(BODY, COURSE_ID, ENDORSED, ENDORSEMENT, ANONYMOUS, ANONYMOUS_TO_PEERS, CREATED_AT, UPDATED_AT, AT_POSITION_LIST) 135 | .merge!("id" => _id, 136 | "user_id" => author_id, 137 | "username" => author_username, 138 | "depth" => depth, 139 | "closed" => comment_thread.nil? ? false : comment_thread.closed, 140 | "edit_history" => edit_history.map(&:to_hash), 141 | "thread_id" => comment_thread_id, 142 | "parent_id" => parent_ids[-1], 143 | "commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id, 144 | "votes" => votes.slice(COUNT, UP_COUNT, DOWN_COUNT, POINT), 145 | "abuse_flaggers" => abuse_flaggers, 146 | "type" => COMMENT, 147 | "child_count" => get_cached_child_count) 148 | end 149 | end 150 | 151 | def get_cached_child_count 152 | update_cached_child_count if self.child_count.nil? 153 | self.child_count 154 | end 155 | 156 | def update_cached_child_count 157 | child_comments_count = Comment.where({"parent_id" => self._id}).count() 158 | self.set(child_count: child_comments_count) 159 | end 160 | 161 | def commentable_id 162 | return nil unless self.comment_thread 163 | self.comment_thread.commentable_id 164 | rescue Mongoid::Errors::DocumentNotFound 165 | nil 166 | end 167 | 168 | def group_id 169 | return nil unless self.comment_thread 170 | self.comment_thread.group_id 171 | rescue Mongoid::Errors::DocumentNotFound 172 | nil 173 | end 174 | 175 | def context 176 | return nil unless self.comment_thread 177 | self.comment_thread.context 178 | rescue Mongoid::Errors::DocumentNotFound 179 | nil 180 | end 181 | 182 | def course_context? 
183 | self.context == 'course' 184 | end 185 | 186 | def standalone_context? 187 | self.context == 'standalone' 188 | end 189 | 190 | def self.by_date_range_and_thread_ids from_when, to_when, thread_ids 191 | #return all content between from_when and to_when 192 | 193 | self.where(:created_at.gte => (from_when)).where(:created_at.lte => (to_when)). 194 | where(:comment_thread_id.in => thread_ids) 195 | end 196 | 197 | private 198 | 199 | def set_thread_last_activity_at 200 | self.comment_thread.update_attribute(:last_activity_at, Time.now.utc) 201 | end 202 | 203 | def set_sk 204 | # this attribute is explicitly write-once 205 | if self.sk.nil? 206 | self.sk = (self.parent_ids.dup << self.id).join("-") 207 | end 208 | end 209 | 210 | begin 211 | require 'new_relic/agent/method_tracer' 212 | include ::NewRelic::Agent::MethodTracer 213 | add_method_tracer :to_hash 214 | rescue LoadError 215 | logger.warn "NewRelic agent library not installed" 216 | end 217 | end 218 | -------------------------------------------------------------------------------- /models/comment_thread.rb: -------------------------------------------------------------------------------- 1 | require 'logger' 2 | require_relative 'concerns/searchable' 3 | require_relative 'content' 4 | require_relative 'constants' 5 | require_relative 'edit_history' 6 | 7 | logger = Logger.new(STDOUT) 8 | logger.level = Logger::WARN 9 | 10 | 11 | class CommentThread < Content 12 | include Mongoid::Timestamps 13 | include Mongoid::Attributes::Dynamic 14 | include ActiveModel::MassAssignmentSecurity 15 | include Elasticsearch::Model 16 | include Searchable 17 | extend Enumerize 18 | 19 | voteable self, :up => +1, :down => -1 20 | 21 | field :thread_type, type: String, default: :discussion 22 | enumerize :thread_type, in: [:question, :discussion] 23 | field :context, type: String, default: :course 24 | enumerize :context, in: [:course, :standalone] 25 | field :comment_count, type: Integer, default: 0 26 | field :title, type: String 27 | field :body, type: String 28 | field :course_id, type: String 29 | field :commentable_id, type: String 30 | field :anonymous, type: Boolean, default: false 31 | field :anonymous_to_peers, type: Boolean, default: false 32 | field :closed, type: Boolean, default: false 33 | field :at_position_list, type: Array, default: [] 34 | field :last_activity_at, type: Time 35 | field :group_id, type: Integer 36 | field :pinned, type: Boolean 37 | field :retired_username, type: String, default: nil 38 | field :close_reason_code, type: String, default: nil # string code that represents why a thread was closed. 
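  # (Deployments might use hypothetical codes like "spam" or "duplicate";
  # mapping codes to display text is left to API clients.)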
39 | 40 | index({ author_id: 1, course_id: 1 }) 41 | 42 | index_name do 43 | prefix = ::CommentService.config[:elasticsearch_index_prefix] 44 | "#{prefix}comment_threads" 45 | end 46 | 47 | mapping dynamic: 'false' do 48 | indexes :title, type: :text, boost: 5.0, store: true, term_vector: :with_positions_offsets 49 | indexes :body, type: :text, store: true, term_vector: :with_positions_offsets 50 | indexes :created_at, type: :date 51 | indexes :updated_at, type: :date 52 | indexes :last_activity_at, type: :date 53 | indexes :comment_count, type: :integer 54 | indexes :votes_point, type: :integer 55 | indexes :context, type: :keyword 56 | indexes :course_id, type: :keyword 57 | indexes :commentable_id, type: :keyword 58 | indexes :author_id, type: :keyword 59 | indexes :group_id, type: :integer 60 | indexes :id, type: :keyword 61 | indexes :thread_id, type: :keyword 62 | end 63 | 64 | def as_indexed_json(options={}) 65 | as_json(except: [:thread_id, :_id]) 66 | end 67 | 68 | belongs_to :author, class_name: 'User', inverse_of: :comment_threads, index: true 69 | belongs_to :closed_by, class_name: 'User', inverse_of: :threads_closed, optional: true 70 | has_many :comments, dependent: :destroy # Use destroy to invoke callback on the top-level comments 71 | has_many :activities, autosave: true 72 | embeds_many :edit_history, cascade_callbacks: true 73 | 74 | attr_accessible :title, :body, :course_id, :commentable_id, :anonymous, :anonymous_to_peers, :closed, 75 | :thread_type, :retired_username, :close_reason_code, :closed_by 76 | 77 | validates_presence_of :thread_type 78 | validates_presence_of :context 79 | validates_presence_of :title 80 | validates_presence_of :body 81 | validates_presence_of :course_id # do we really need this? 82 | validates_presence_of :commentable_id 83 | validates_presence_of :author, autosave: false 84 | 85 | before_create :set_last_activity_at 86 | after_update :clear_endorsements 87 | before_save do 88 | unless anonymous or anonymous_to_peers or not body_changed? 89 | author.update_activity_timestamp course_id 90 | end 91 | end 92 | before_destroy :destroy_subscriptions 93 | after_destroy do 94 | unless anonymous or anonymous_to_peers 95 | author.update_stats_for_course(course_id, threads: -1) 96 | end 97 | end 98 | after_create do 99 | # Don't count anonymous posts. 
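    # (Mirrors the after_destroy hook above, which decrements the same
    # per-course thread count.)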
100 | unless anonymous or anonymous_to_peers 101 | author.update_stats_for_course(course_id, threads: 1) 102 | end 103 | end 104 | 105 | scope :active_since, ->(from_time) { where(:last_activity_at => {:$gte => from_time}) } 106 | scope :standalone_context, ->() { where(:context => :standalone) } 107 | scope :course_context, ->() { where(:context => :course) } 108 | 109 | def activity_since(from_time=nil) 110 | if from_time 111 | activities.where(:created_at => {:$gte => from_time}) 112 | else 113 | activities 114 | end 115 | end 116 | 117 | def activity_today 118 | activity_since(Date.today.to_time) 119 | end 120 | 121 | def activity_this_week 122 | activity_since(Date.today.to_time - 1.weeks) 123 | end 124 | 125 | def activity_this_month 126 | activity_since(Date.today.to_time - 1.months) 127 | end 128 | 129 | def activity_overall 130 | activity_since(nil) 131 | end 132 | 133 | def root_comments 134 | Comment.roots.where(comment_thread_id: self.id) 135 | end 136 | 137 | def commentable 138 | Commentable.find(commentable_id) 139 | end 140 | 141 | def subscriptions 142 | Subscription.where(source_id: id.to_s, source_type: self.class.to_s) 143 | end 144 | 145 | def subscribers 146 | subscriptions.map(&:subscriber) 147 | end 148 | 149 | def endorsed? 150 | comments.where(endorsed: true).exists? 151 | end 152 | 153 | def to_hash(params={}) 154 | as_document 155 | .slice(THREAD_TYPE, TITLE, BODY, COURSE_ID, ANONYMOUS, ANONYMOUS_TO_PEERS, COMMENTABLE_ID, CREATED_AT, UPDATED_AT, AT_POSITION_LIST, CLOSED, CONTEXT, LAST_ACTIVITY_AT, CLOSE_REASON_CODE) 156 | .merge!("id" => _id, 157 | "user_id" => author_id, 158 | "username" => author_username, 159 | "votes" => votes.slice(COUNT, UP_COUNT, DOWN_COUNT, POINT), 160 | "abuse_flaggers" => abuse_flaggers, 161 | "edit_history" => edit_history.map(&:to_hash), 162 | "closed_by" => closed_by? ? closed_by.username : nil, 163 | "tags" => [], 164 | "type" => THREAD, 165 | "group_id" => group_id, 166 | "pinned" => pinned?, 167 | "comments_count" => comment_count) 168 | end 169 | 170 | def comment_thread_id 171 | #so that we can use the comment thread id as a common attribute for flagging 172 | self.id 173 | end 174 | 175 | private 176 | 177 | def set_last_activity_at 178 | self.last_activity_at = Time.now.utc unless last_activity_at_changed? 179 | end 180 | 181 | def clear_endorsements 182 | if self.thread_type_changed? 183 | # We use 'set' instead of 'update_attributes' because the Comment model has a 'before_update' callback that sets 184 | # the last activity time on the thread. Therefore the callbacks would be mutually recursive and we end up with a 185 | # 'SystemStackError'. The 'set' method skips callbacks and therefore bypasses this issue. 
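      # In other words, a hypothetical comment.update_attributes(endorsed: false)
      # here would re-enter that callback chain and eventually raise
      # SystemStackError, whereas set() writes straight to the database.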
186 | self.comments.each do |comment| 187 | comment.set(endorsed: false) 188 | comment.set(endorsement: nil) 189 | end 190 | end 191 | end 192 | 193 | def destroy_subscriptions 194 | subscriptions.delete_all 195 | end 196 | 197 | begin 198 | require 'new_relic/agent/method_tracer' 199 | include ::NewRelic::Agent::MethodTracer 200 | add_method_tracer :to_hash 201 | rescue LoadError 202 | logger.warn "NewRelic agent library not installed" 203 | end 204 | end 205 | -------------------------------------------------------------------------------- /models/commentable.rb: -------------------------------------------------------------------------------- 1 | class Commentable 2 | 3 | attr_accessor :id, :_type 4 | alias_attribute :_id, :id 5 | 6 | class << self; alias_method :find, :new; end 7 | 8 | def initialize(id) 9 | self.id = id 10 | self._type = self.class.to_s 11 | end 12 | 13 | def self.where(params={}) 14 | params[:id] ? [self.new(params[:id])] : self 15 | end 16 | 17 | def comment_threads 18 | CommentThread.where(commentable_id: id) 19 | end 20 | 21 | def subscriptions 22 | Subscription.where(source_id: id.to_s, source_type: self.class.to_s) 23 | end 24 | 25 | def subscribers 26 | subscriptions.map(&:subscriber) 27 | end 28 | 29 | end 30 | -------------------------------------------------------------------------------- /models/concerns/searchable.rb: -------------------------------------------------------------------------------- 1 | require 'logger' 2 | 3 | logger = Logger.new(STDOUT) 4 | logger.level = Logger::WARN 5 | 6 | module Searchable 7 | extend ActiveSupport::Concern 8 | 9 | included do 10 | include Elasticsearch::Model 11 | 12 | # We specify our own callbacks, instead of using Elasticsearch::Model::Callbacks, so that we can disable 13 | # indexing for tests where search functionality is not needed. This should improve test execution times. 14 | after_create :index_document 15 | after_update :update_indexed_document 16 | after_destroy :delete_document 17 | 18 | def as_indexed_json(options={}) 19 | # TODO: Play with the `MyModel.indexes` method -- reject non-mapped attributes, `:as` options, etc 20 | self.as_json(options.merge root: false) 21 | end 22 | 23 | # Class-level variable which toggles all ES callbacks. This should be an instance-level variable, 24 | # ideally, but it took us too long to get that working correctly. This should be safe because forums 25 | # code runs single-threaded. 26 | @@enable_es = true 27 | 28 | def es_enabled? 29 | @@enable_es 30 | end 31 | 32 | def without_es 33 | # A "Context Manager" to temporarily disable elasticsearch callbacks. Whatever happens, this makes 34 | # sure that enable_es is restored. E.g.: 35 | # 36 | # comment.without_es do 37 | # comment.update!(data) 38 | # end 39 | # 40 | original_enable_es = es_enabled? 41 | @@enable_es = false 42 | begin 43 | yield 44 | rescue 45 | @@enable_es = original_enable_es 46 | raise 47 | else 48 | @@enable_es = original_enable_es 49 | end 50 | end 51 | 52 | private # all methods below are private 53 | 54 | def index_document 55 | __elasticsearch__.index_document if CommentService.search_enabled? && es_enabled? 56 | end 57 | 58 | # This is named in this manner to prevent collisions with Mongoid's update_document method. 59 | def update_indexed_document 60 | begin 61 | __elasticsearch__.update_document if CommentService.search_enabled? && es_enabled? 62 | rescue Elasticsearch::Transport::Transport::Errors::NotFound => e 63 | # If attempting to update a document that doesn't exist, just continue. 
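        # (Elasticsearch raises NotFound when the document id is absent from
        # the index, e.g. if the record was saved while indexing was disabled.)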
64 | logger.warn "ES update failed upon update_document - not found." 65 | end 66 | end 67 | 68 | def delete_document 69 | begin 70 | __elasticsearch__.delete_document if CommentService.search_enabled? && es_enabled? 71 | rescue Elasticsearch::Transport::Transport::Errors::NotFound => e 72 | # If attempting to delete a document that doesn't exist, just continue. 73 | logger.warn "ES delete failed upon delete_document - not found." 74 | end 75 | end 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /models/constants.rb: -------------------------------------------------------------------------------- 1 | BODY = "body".freeze 2 | COURSE_ID = "course_id".freeze 3 | ENDORSED = "endorsed".freeze 4 | ENDORSEMENT = "endorsement".freeze 5 | ANONYMOUS = "anonymous".freeze 6 | ANONYMOUS_TO_PEERS = "anonymous_to_peers".freeze 7 | CREATED_AT = "created_at".freeze 8 | UPDATED_AT = "updated_at".freeze 9 | AT_POSITION_LIST = "at_position_list".freeze 10 | THREAD_TYPE = "thread_type".freeze 11 | TITLE = "title".freeze 12 | COMMENTABLE_ID = "commentable_id".freeze 13 | CLOSED = "closed".freeze 14 | CONTEXT = "context".freeze 15 | LAST_ACTIVITY_AT = "last_activity_at".freeze 16 | NOTIFICATION_TYPE = "notification_type".freeze 17 | INFO = "info".freeze 18 | ACTOR_ID = "actor_id".freeze 19 | TARGET_ID = "target_id".freeze 20 | SUBSCRIBER_ID = "subscriber_id".freeze 21 | SOURCE_ID = "source_id".freeze 22 | SOURCE_TYPE = "source_type".freeze 23 | COUNT = "count".freeze 24 | UP_COUNT = "up_count".freeze 25 | DOWN_COUNT = "down_count".freeze 26 | POINT = "point".freeze 27 | USERNAME = "username".freeze 28 | EXTERNAL_ID = "external_id".freeze 29 | COMMENT = "comment".freeze 30 | THREAD = "thread".freeze 31 | 32 | REASON_CODE = "reason_code".freeze 33 | CLOSE_REASON_CODE = "close_reason_code".freeze 34 | EDIT_REASON_CODE = "edit_reason_code".freeze 35 | ORIGINAL_BODY = "original_body".freeze 36 | EDIT_HISTORY = "edit_history".freeze 37 | EDITOR_USERNAME = "editor_username".freeze 38 | 39 | 40 | RETIRED_TITLE = "[deleted]".freeze 41 | RETIRED_BODY = "[deleted]".freeze 42 | -------------------------------------------------------------------------------- /models/content.rb: -------------------------------------------------------------------------------- 1 | class Content 2 | include Mongoid::Document 3 | include Mongo::Voteable 4 | 5 | field :visible, type: Boolean, default: true 6 | field :abuse_flaggers, type: Array, default: [] 7 | field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags 8 | field :author_username, type: String, default: nil 9 | 10 | index({_type: 1, course_id: 1, pinned: -1, created_at: -1}, {background: true}) 11 | index({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}, {background: true}) 12 | index({_type: 1, course_id: 1, pinned: -1, 'votes.point' => -1, created_at: -1}, {background: true}) 13 | index({_type: 1, course_id: 1, pinned: -1, last_activity_at: -1, created_at: -1}, {background: true}) 14 | index({comment_thread_id: 1, sk: 1}, {sparse: true}) 15 | index({comment_thread_id: 1, endorsed: 1}, {sparse: true}) 16 | index({commentable_id: 1}, {sparse: true, background: true}) 17 | 18 | before_save :set_username 19 | 20 | 21 | def author_with_anonymity(attr=nil, attr_when_anonymous=nil) 22 | if not attr 23 | (anonymous || anonymous_to_peers) ? nil : author 24 | else 25 | (anonymous || anonymous_to_peers) ? 
attr_when_anonymous : author.send(attr)
26 |     end
27 |   end
28 | 
29 |   def self.flagged
30 |     # return an array of flagged content
31 |     holder = []
32 |     Content.where(:abuse_flaggers.ne => [], :abuse_flaggers.exists => true).each do |c|
33 |       holder << c
34 |     end
35 |     holder
36 |   end
37 | 
38 |   def self.prolific_metric what, count
39 |     # take a hash of criteria (what) and return the top `count` contributors as an
40 |     # array of {'_id' => author_id, 'value' => contribution_count} documents
41 | 
42 |     map = 'function(){emit(this.author_id,1)}'
43 |     reduce = 'function(k, vals) { var sum = 0; for(var i in vals) sum += vals[i]; return sum; }'
44 | 
45 |     contributors = []
46 |     self.where(what).map_reduce(map, reduce).out(replace: 'results').each do |d|
47 |       contributors << d
48 |     end
49 | 
50 |     # now sort and limit them
51 | 
52 |     # first sort destructively, highest contribution count first
53 |     contributors.sort! { |a, b| -a['value'] <=> -b['value'] }
54 |     # then trim it
55 |     contributors = contributors[0..(count - 1)]
56 | 
57 |     contributors
58 | 
59 |   end
60 | 
61 |   def self.summary what
62 |     # take a hash of criteria (what) and return a flat hash of totals:
63 |     # votes, threads, comments, and distinct contributors
64 | 
65 |     answer = {}
66 |     vote_count = 0
67 |     thread_count = 0
68 |     comment_count = 0
69 |     contributors = []
70 |     content = self.where(what)
71 | 
72 |     content.each do |c|
73 |       contributors << c.author_id
74 |       contributors << c['votes']['up']
75 |       contributors << c['votes']['down']
76 |       vote_count += c['votes']['count']
77 |       if c._type == 'CommentThread'
78 |         thread_count += 1
79 |       elsif c._type == 'Comment'
80 |         comment_count += 1
81 |       end
82 |     end
83 | 
84 |     # flatten the voter id arrays, then uniquify to count each user only once
85 |     contributors = contributors.flatten.uniq
86 | 
87 |     # assemble the answer and ship
88 | 
89 |     answer['vote_count'] = vote_count
90 |     answer['thread_count'] = thread_count
91 |     answer['comment_count'] = comment_count
92 |     answer['contributor_count'] = contributors.count
93 | 
94 |     answer
95 |   end
96 | 
97 |   private
98 | 
99 |   def set_username
100 |     # avoid having to look this attribute up later, since it does not change
101 |     self.author_username = self.retired_username ?
self.retired_username : author.username 102 | end 103 | end 104 | -------------------------------------------------------------------------------- /models/edit_history.rb: -------------------------------------------------------------------------------- 1 | class EditHistory 2 | include Mongoid::Document 3 | include Mongoid::Timestamps::Created 4 | 5 | field :original_body, type: String 6 | field :reason_code, type: String 7 | field :editor_username, type: String 8 | 9 | belongs_to :author, class_name: 'User', inverse_of: :comment_edits 10 | 11 | embedded_in :comment 12 | 13 | def to_hash 14 | as_document.slice(ORIGINAL_BODY, REASON_CODE, EDITOR_USERNAME, CREATED_AT) 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /models/notification.rb: -------------------------------------------------------------------------------- 1 | require_relative 'constants' 2 | 3 | class Notification 4 | include Mongoid::Document 5 | include Mongoid::Timestamps 6 | include ActiveModel::MassAssignmentSecurity 7 | 8 | field :notification_type, type: String 9 | field :info, type: Hash 10 | 11 | attr_accessible :notification_type, :info 12 | 13 | validates_presence_of :notification_type 14 | validates_presence_of :info 15 | 16 | has_and_belongs_to_many :receivers, class_name: "User", inverse_of: :notifications, autosave: true 17 | 18 | def to_hash(params={}) 19 | as_document 20 | .slice(NOTIFICATION_TYPE, INFO, ACTOR_ID, TARGET_ID) 21 | .merge!("id" => _id) 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /models/observers/at_user_observer.rb: -------------------------------------------------------------------------------- 1 | require 'set' 2 | 3 | class AtUserObserver < Mongoid::Observer 4 | observe :comment, :comment_thread 5 | 6 | def after_create(content) 7 | self.class.delay.process_at_notifications(content) 8 | end 9 | 10 | def self.process_at_notifications(content) 11 | text = content.body 12 | 13 | content_type = content.respond_to?(:title) ? :thread : :comment 14 | text = content.title + "\n\n" + text if content_type == :thread 15 | 16 | at_positions = self.get_valid_at_position_list text 17 | prev_at_positions = content.at_position_list 18 | 19 | content.update_attributes!(at_position_list: at_positions) 20 | 21 | prev_user_ids = prev_at_positions.map { |x| x[:user_id] }.to_set 22 | current_user_ids = at_positions.map { |x| x[:user_id] }.to_set 23 | 24 | new_user_ids = current_user_ids - prev_user_ids 25 | 26 | if content_type == :thread 27 | thread_title = content.title 28 | thread_id = content.id 29 | commentable_id = content.commentable_id 30 | else 31 | thread_title = content.comment_thread.title 32 | thread_id = content.comment_thread.id 33 | commentable_id = content.comment_thread.commentable_id 34 | end 35 | 36 | unless new_user_ids.empty? 37 | 38 | notification = Notification.new( 39 | notification_type: "at_user", 40 | info: { 41 | comment_id: (content.id if content_type == :comment), 42 | content_type: content_type, 43 | thread_title: thread_title, 44 | thread_id: thread_id, 45 | actor_username: content.author_with_anonymity(:username), 46 | actor_id: content.author_with_anonymity(:id), 47 | commentable_id: commentable_id, 48 | } 49 | ) 50 | receivers = new_user_ids.map { |id| User.find(id) } 51 | receivers.delete(content.author) 52 | notification.receivers << receivers 53 | notification.save! 
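# save! also persists the receivers relation (the has_and_belongs_to_many on
# Notification is declared with autosave: true); since the author was removed
# above, users are never notified about their own @-mentions.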
54 | end 55 | end 56 | 57 | private 58 | 59 | AT_NOTIFICATION_REGEX = /(?<=^|\s)(@[A-Za-z0-9_]+)(?!\w)/ 60 | 61 | def self.get_marked_text(text) 62 | counter = -1 63 | text.gsub AT_NOTIFICATION_REGEX do 64 | counter += 1 65 | "#{$1}_#{counter}" 66 | end 67 | end 68 | 69 | def self.get_at_position_list(text) 70 | list = [] 71 | text.gsub AT_NOTIFICATION_REGEX do 72 | parts = $1.rpartition('_') 73 | username = parts.first[1..-1] 74 | user = User.where(username: username).first 75 | if user 76 | list << { position: parts.last.to_i, username: parts.first[1..-1], user_id: user.id } 77 | end 78 | end 79 | list 80 | end 81 | 82 | def self.get_valid_at_position_list(text) 83 | html = Nokogiri::HTML(RDiscount.new(self.get_marked_text(text)).to_html) 84 | html.xpath('//code').each do |c| 85 | c.children = '' 86 | end 87 | self.get_at_position_list html.to_s 88 | end 89 | end 90 | -------------------------------------------------------------------------------- /models/observers/post_reply_observer.rb: -------------------------------------------------------------------------------- 1 | class PostReplyObserver < Mongoid::Observer 2 | observe :comment 3 | 4 | def after_create(comment) 5 | self.class.delay.generate_activity_and_notifications(comment) 6 | end 7 | 8 | def self.generate_activity_and_notifications(comment) 9 | 10 | activity = Activity.new 11 | activity.happend_at = comment.created_at 12 | activity.anonymous = (comment.anonymous || comment.anonymous_to_peers) 13 | activity.actor = comment.author 14 | activity.target = comment.comment_thread 15 | activity.activity_type = "post_reply" 16 | activity.save! 17 | 18 | if comment.comment_thread.subscribers or (comment.author.followers if not activity.anonymous) 19 | notification = Notification.new( 20 | notification_type: "post_reply", 21 | info: { 22 | thread_id: comment.comment_thread.id, 23 | thread_title: comment.comment_thread.title, 24 | comment_id: comment.id, 25 | commentable_id: comment.comment_thread.commentable_id, 26 | actor_username: comment.author_with_anonymity(:username), 27 | actor_id: comment.author_with_anonymity(:id), 28 | }, 29 | ) 30 | receivers = (comment.comment_thread.subscribers + comment.author_with_anonymity(:followers, [])).uniq_by(&:id) 31 | receivers.delete(comment.author) 32 | notification.receivers << receivers 33 | notification.save! 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /models/observers/post_topic_observer.rb: -------------------------------------------------------------------------------- 1 | class PostTopicObserver < Mongoid::Observer 2 | observe :comment_thread 3 | 4 | def after_create(comment_thread) 5 | self.class.delay.generate_notifications(comment_thread) 6 | end 7 | 8 | def self.generate_notifications(comment_thread) 9 | activity = Activity.new 10 | activity.happend_at = comment_thread.created_at 11 | activity.anonymous = (comment_thread.anonymous || comment_thread.anonymous_to_peers) 12 | activity.actor = comment_thread.author 13 | #activity.target_id = comment_thread.commentable.id 14 | #activity.target_type = comment_thread.commentable._type 15 | activity.activity_type = "post_topic" 16 | activity.save! 
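# (The activity target is left unset here, unlike in PostReplyObserver: Commentable
# objects are not persisted documents (see models/commentable.rb), so presumably
# they cannot be stored as a polymorphic target, hence the commented-out lines above.)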
17 |     if comment_thread.commentable.subscribers or (comment_thread.author.followers if not activity.anonymous)
18 |       notification = Notification.new(
19 |         notification_type: "post_topic",
20 |         info: {
21 |           commentable_id: comment_thread.commentable_id,
22 |           thread_id: comment_thread.id,
23 |           thread_title: comment_thread.title,
24 |           actor_username: comment_thread.author_with_anonymity(:username),
25 |           actor_id: comment_thread.author_with_anonymity(:id),
26 |         },
27 |       )
28 |       receivers = (comment_thread.commentable.subscribers + comment_thread.author_with_anonymity(:followers, [])).uniq_by(&:id)
29 |       receivers.delete(comment_thread.author)
30 |       notification.receivers << receivers
31 |       notification.save!
32 |     end
33 |   end
34 | end
35 | 
-------------------------------------------------------------------------------- /models/subscription.rb: --------------------------------------------------------------------------------
1 | require_relative 'constants'
2 | 
3 | class Subscription
4 |   include Mongoid::Document
5 |   include Mongoid::Timestamps
6 | 
7 |   field :subscriber_id, type: String
8 |   field :source_id, type: String
9 |   field :source_type, type: String
10 | 
11 |   index({subscriber_id: 1, source_id: 1, source_type: 1})
12 |   index({subscriber_id: 1, source_type: 1})
13 |   index({subscriber_id: 1})
14 |   index({source_id: 1, source_type: 1}, {background: true})
15 | 
16 |   def to_hash
17 |     as_document
18 |       .slice(SUBSCRIBER_ID, SOURCE_ID, SOURCE_TYPE)
19 |       .merge!("id" => _id)
20 |   end
21 | 
22 |   def subscriber
23 |     User.find(subscriber_id)
24 |   end
25 | 
26 |   def source
27 |     source_type.constantize.find(source_id)
28 |   end
29 | end
30 | 
-------------------------------------------------------------------------------- /mongoutil.rb: --------------------------------------------------------------------------------
1 | def get_db_is_master
2 |   Mongoid::Clients.default.command(isMaster: 1)
3 | end
4 | 
5 | def is_mongo_primary?
6 |   begin
7 |     response = get_db_is_master
8 |     return response.ok? &&
9 |       response.documents.first['ismaster'] == true
10 |   rescue
11 |     # ignored
12 |   end
13 | 
14 |   false
15 | end
16 | 
17 | def is_mongo_available?
18 |   begin
19 |     response = get_db_is_master
20 |     return response.ok? &&
21 |       (response.documents.first['ismaster'] == true ||
22 |         Mongoid::Clients.default.options[:read][:mode] != :primary)
23 |   rescue
24 |     # ignored
25 |   end
26 | 
27 |   false
28 | end
29 | 
30 | 
31 | def reconnect_mongo_primary
32 |   begin
33 |     Mongoid::Clients.default.close
34 |     Mongoid::Clients.default.reconnect
35 |   end unless is_mongo_primary?
36 | end
37 | 
-------------------------------------------------------------------------------- /presenters/thread.rb: --------------------------------------------------------------------------------
1 | require 'logger'
2 | require_relative 'thread_utils'
3 | 
4 | class ThreadPresenter
5 | 
6 |   def self.factory(thread, user, count_flagged=false)
7 |     # use when working with one thread at a time. fetches extended /
8 |     # derived attributes from the db and explicitly initializes an instance.
9 |     course_id = thread.course_id
10 |     thread_key = thread._id.to_s
11 |     is_read, unread_count = ThreadUtils
12 |       .get_read_states([thread], user, course_id)
13 |       .fetch(thread_key, [false, thread.comment_count])
14 |     is_endorsed = ThreadUtils.get_endorsed([thread]).fetch(thread_key, false)
15 |     abuse_flagged_count = count_flagged ?
16 | ThreadUtils.get_abuse_flagged_count([thread]).fetch(thread_key, nil) : 17 | nil 18 | self.new thread, user, is_read, unread_count, is_endorsed, abuse_flagged_count 19 | end 20 | 21 | def initialize(thread, user, is_read, unread_count, is_endorsed, abuse_flagged_count) 22 | # generally not intended for direct use. instantiated by self.factory or 23 | # by thread list presenters. 24 | @thread = thread 25 | @user = user 26 | @is_read = is_read 27 | @unread_count = unread_count 28 | @is_endorsed = is_endorsed 29 | @abuse_flagged_count = abuse_flagged_count 30 | end 31 | 32 | def to_hash( 33 | with_responses=false, 34 | resp_skip=0, 35 | resp_limit=nil, 36 | recursive=true, 37 | flagged_comments=false, 38 | reverse_order=false, 39 | merge_question_type_responses=false 40 | ) 41 | raise ArgumentError unless resp_skip >= 0 42 | raise ArgumentError unless resp_limit.nil? or resp_limit >= 1 43 | h = @thread.to_hash 44 | h["read"] = @is_read 45 | h["unread_comments_count"] = @unread_count 46 | h["endorsed"] = @is_endorsed || false 47 | unless @abuse_flagged_count.nil? 48 | h["abuse_flagged_count"] = @abuse_flagged_count 49 | end 50 | sorting_key_order = reverse_order ? -1 : 1 51 | if with_responses 52 | if (@thread.thread_type.discussion? || (@thread.thread_type.question? && merge_question_type_responses)) && 53 | resp_skip == 0 && resp_limit.nil? 54 | if recursive 55 | content = Comment.where(comment_thread_id: @thread._id).order_by({"sk" => sorting_key_order}) 56 | else 57 | content = Comment.where(comment_thread_id: @thread._id, "parent_ids" => []).order_by({"sk" => sorting_key_order}) 58 | end 59 | if flagged_comments 60 | content = content.where(:abuse_flaggers.nin => [nil, []]) 61 | end 62 | h["children"] = merge_response_content(content) 63 | h["resp_total"] = content.to_a.select{|d| d.depth == 0 }.length 64 | else 65 | responses = Content.where(comment_thread_id: @thread._id).exists(parent_id: false) 66 | if flagged_comments 67 | responses = responses.where(:abuse_flaggers.nin => [nil, []]) 68 | end 69 | case @thread.thread_type 70 | when "question" 71 | if merge_question_type_responses 72 | response_info = get_paged_merged_responses( 73 | @thread._id, 74 | responses, 75 | resp_skip, 76 | resp_limit, 77 | recursive, 78 | sorting_key_order 79 | ) 80 | h["children"] = response_info["responses"] 81 | h["resp_total"] = response_info["response_count"] 82 | else 83 | endorsed_responses = responses.where(endorsed: true) 84 | non_endorsed_responses = responses.where(endorsed: false) 85 | endorsed_response_info = get_paged_merged_responses( 86 | @thread._id, 87 | endorsed_responses, 88 | 0, 89 | nil, 90 | recursive, 91 | sorting_key_order 92 | ) 93 | non_endorsed_response_info = get_paged_merged_responses( 94 | @thread._id, 95 | non_endorsed_responses, 96 | resp_skip, 97 | resp_limit, 98 | recursive, 99 | sorting_key_order 100 | ) 101 | h["endorsed_responses"] = endorsed_response_info["responses"] 102 | h["non_endorsed_responses"] = non_endorsed_response_info["responses"] 103 | h["non_endorsed_resp_total"] = non_endorsed_response_info["response_count"] 104 | h["resp_total"] = non_endorsed_response_info["response_count"] + endorsed_response_info["response_count"] 105 | end 106 | when "discussion" 107 | response_info = get_paged_merged_responses( 108 | @thread._id, 109 | responses, 110 | resp_skip, 111 | resp_limit, 112 | recursive, 113 | sorting_key_order 114 | ) 115 | h["children"] = response_info["responses"] 116 | h["resp_total"] = response_info["response_count"] 117 | end 118 | end 119 | 
h["resp_skip"] = resp_skip
120 |       h["resp_limit"] = resp_limit
121 |     end
122 |     h
123 |   end
124 | 
125 |   # Given a Mongoid object representing responses, apply pagination and return
126 |   # a hash containing the following:
127 |   #   responses
128 |   #     An array of hashes representing the page of responses (including
129 |   #     children, if recursive is true)
130 |   #   response_count
131 |   #     The total number of responses
132 |   def get_paged_merged_responses(thread_id, responses, skip, limit, recursive=false, sorting_key_order)
133 |     response_ids = responses.only(:_id).sort({"sk" => sorting_key_order}).to_a.map{|doc| doc["_id"]}
134 |     paged_response_ids = limit.nil? ? response_ids.drop(skip) : response_ids.drop(skip).take(limit)
135 |     if recursive
136 |       content = Comment.where(comment_thread_id: thread_id).
137 |         any_of({:parent_id => {"$in" => paged_response_ids}}, {:id => {"$in" => paged_response_ids}}).
138 |         sort({"sk" => sorting_key_order})
139 |     else
140 |       content = Comment.where(comment_thread_id: thread_id, "parent_ids" => []).
141 |         where({:id => {"$in" => paged_response_ids}}).sort({"sk" => sorting_key_order})
142 |     end
143 |     {"responses" => merge_response_content(content), "response_count" => response_ids.length}
144 |   end
145 | 
146 |   # Takes content output from Mongoid in a depth-first traversal order and
147 |   # returns an array of first-level response hashes with content represented
148 |   # hierarchically, with a comment's list of children in the key "children".
149 |   def merge_response_content(content)
150 |     top_level = []
151 |     ancestry = []
152 |     orphans = []
153 |     content.each do |item|
154 |       item_hash = item.to_hash.merge!("children" => [])
155 |       if item.parent_id.nil?
156 |         top_level << item_hash
157 |         ancestry = [item_hash]
158 |         # When the content is reversed, we collect orphan items
159 |         # until we reach their parent. Here we iterate through the
160 |         # orphans and assign them as children of the top-level item.
161 |         unless orphans.empty?
162 |           orphans.each do |orphan|
163 |             if item.id == orphan["parent_id"]
164 |               item_hash["children"] << orphan
165 |             end
166 |           end
167 |           orphans = []
168 |         end
169 |       else
170 |         # "ancestry" can be empty only when the order is reversed.
171 |         if ancestry.empty?
172 |           ancestry << item_hash
173 |           orphans << item_hash
174 |           next
175 |         end
176 | 
177 |         while ancestry.length > 0 do
178 |           if item.parent_id == ancestry.last["id"]
179 |             ancestry.last["children"] << item_hash
180 |             ancestry << item_hash
181 |             break
182 |           elsif ancestry.length == 1
183 |             # "ancestry" can only have length 1 here when the order is reversed.
184 |             orphans << item_hash
185 |             ancestry.pop
186 |           else
187 |             ancestry.pop
188 |           end
189 |         end
190 |       end
191 |     end
192 |     top_level
193 |   end
194 |   logger = Logger.new(STDOUT)
195 |   logger.level = Logger::WARN
196 |   begin
197 |     require 'new_relic/agent/method_tracer'
198 |     include ::NewRelic::Agent::MethodTracer
199 |     add_method_tracer :to_hash
200 |     add_method_tracer :merge_response_content
201 |   rescue LoadError
202 |     logger.warn "NewRelic agent library not installed"
203 |   end
204 | 
205 | end
206 | 
-------------------------------------------------------------------------------- /presenters/thread_list.rb: --------------------------------------------------------------------------------
1 | require_relative 'thread'
2 | require_relative 'thread_utils'
3 | 
4 | class ThreadListPresenter
5 | 
6 |   def initialize(threads, user, course_id, count_flagged=false)
7 |     read_states = ThreadUtils.get_read_states(threads, user, course_id)
8 |     threads_endorsed = ThreadUtils.get_endorsed(threads)
9 |     if count_flagged
10 |       threads_flagged = ThreadUtils.get_abuse_flagged_count(threads)
11 |     else
12 |       threads_flagged = Hash.new
13 |     end
14 |     @presenters = threads.map do |thread|
15 |       thread_key = thread._id.to_s
16 |       is_read, unread_count = read_states.fetch(thread_key, [false, thread.comment_count])
17 |       is_endorsed = threads_endorsed.fetch(thread_key, false)
18 |       abuse_flagged_count = threads_flagged.fetch(thread_key, nil)
19 |       ThreadPresenter.new(thread, user, is_read, unread_count, is_endorsed, abuse_flagged_count)
20 |     end
21 |   end
22 | 
23 |   def to_hash
24 |     @presenters.map { |p| p.to_hash }
25 |   end
26 | 
27 | end
28 | 
-------------------------------------------------------------------------------- /presenters/thread_utils.rb: --------------------------------------------------------------------------------
1 | require 'logger'
2 | 
3 | 
4 | module ThreadUtils
5 | 
6 |   def self.get_endorsed(threads)
7 |     # returns sparse hash {thread_key => true, ...}
8 |     # only threads which are endorsed will have entries, value will always be true.
9 |     endorsed_threads = {}
10 |     thread_ids = threads.collect {|t| t._id}
11 |     Comment.collection.aggregate([
12 |       {"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
13 |       {"$group" => {"_id" => "$comment_thread_id"}}
14 |     ]).each do |res|
15 |       endorsed_threads[res["_id"].to_s] = true
16 |     end
17 |     endorsed_threads
18 |   end
19 | 
20 |   def self.get_abuse_flagged_count(threads)
21 |     # returns sparse hash {thread_key => abuse_flagged_comment_count, ...}
22 |     # only threads containing at least one flagged comment will have entries.
23 |     flagged_threads = {}
24 |     thread_ids = threads.collect {|t| t._id}
25 |     Comment.collection.aggregate([
26 |       {"$match" => {
27 |         "comment_thread_id" => {"$in" => thread_ids},
28 |         "abuse_flaggers"=> {"$ne"=> []},
29 |       }},
30 |       {"$group" => {"_id" => "$comment_thread_id", "flagged" => { "$sum" => 1}} }
31 |     ]).each do |res|
32 |       flagged_threads[res["_id"].to_s] = res["flagged"]
33 |     end
34 |     flagged_threads
35 |   end
36 | 
37 |   def self.get_read_states(threads, user, course_id)
38 |     # returns sparse hash {thread_key => [is_read, unread_comment_count], ...}
39 |     read_states = {}
40 |     if user
41 |       read_dates = {}
42 |       read_state = user.read_states.where(:course_id => course_id).first
43 |       if read_state
44 |         read_dates = read_state["last_read_times"].to_hash
45 |         threads.each do |t|
46 |           thread_key = t._id.to_s
47 |           if read_dates.has_key?
thread_key 48 | is_read = read_dates[thread_key] >= t.last_activity_at 49 | unread_comment_count = Comment.collection.find( 50 | :comment_thread_id => t._id, 51 | :author_id => {"$ne" => user.id}, 52 | :created_at => {"$gte" => read_dates[thread_key]} 53 | ).count 54 | read_states[thread_key] = [is_read, unread_comment_count] 55 | end 56 | end 57 | end 58 | end 59 | read_states 60 | end 61 | 62 | class << self 63 | logger = Logger.new(STDOUT) 64 | logger.level = Logger::WARN 65 | begin 66 | require 'new_relic/agent/method_tracer' 67 | include ::NewRelic::Agent::MethodTracer 68 | add_method_tracer :get_read_states 69 | add_method_tracer :get_endorsed 70 | rescue LoadError 71 | logger.warn "NewRelic agent library not installed" 72 | end 73 | end 74 | 75 | end 76 | -------------------------------------------------------------------------------- /scripts/db/migrate-001-sk-author_username.js: -------------------------------------------------------------------------------- 1 | 2 | print ("backpopulating author_username into contents collection"); 3 | var tot = db.users.count(); 4 | print ("found " + tot + " users to process..."); 5 | var cnt = 0; 6 | db.users.find({}, {external_id:1, username:1}).forEach(function (doc) { 7 | db.contents.update( 8 | {author_id:doc["external_id"], author_username:{$exists:false}}, 9 | {$set:{author_username:doc["username"]}}, 10 | {multi:true} 11 | ); 12 | cnt += 1; 13 | if (cnt == tot) { 14 | print("done!"); 15 | } else if (cnt % 1000 === 0) { 16 | print("processed " + cnt + " records (" + parseInt((cnt/tot)*100) + "% complete)"); 17 | } 18 | }); 19 | 20 | print ("backpopulating content with orphaned author ids"); 21 | db.contents.update({author_username:{$exists:false}}, {$set:{author_username:null}}, {multi:true}); 22 | print ("done!"); 23 | 24 | print ("backpopulating hierarchical sorting keys into contents collection"); 25 | var tot = db.contents.find({"_type":"Comment","sk":{$exists:false}}).count(); 26 | print ("found " + tot + " comments to process..."); 27 | var cnt = 0; 28 | db.contents.find({"_type":"Comment","sk":{$exists:false}}).forEach(function (doc) { 29 | var i, sort_ids; 30 | if (typeof(doc.sk)==="undefined") { 31 | if (typeof(doc.parent_ids)==="undefined") { 32 | sort_ids = []; 33 | } else { 34 | sort_ids = doc.parent_ids.slice(0); 35 | } 36 | sort_ids.push(doc._id); 37 | doc.sk = sort_ids.map(function (oid) {return oid.str}).join("-"); 38 | db.contents.save(doc); 39 | } 40 | cnt += 1; 41 | if (cnt == tot) { 42 | print("done!"); 43 | } else if (cnt % 1000 === 0) { 44 | print("processed " + cnt + " records (" + parseInt((cnt/tot)*100) + "% complete)"); 45 | } 46 | }); 47 | 48 | print ("creating index on new sorting keys..."); 49 | db.contents.ensureIndex({"sk":1}) 50 | print ("all done!"); 51 | 52 | -------------------------------------------------------------------------------- /scripts/db/migrate-002-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.ensureIndex({ _type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1 }, { background: true }) 2 | db.contents.ensureIndex({ comment_thread_id: 1, sk: 1 }, { background: true, sparse: true }) 3 | db.contents.ensureIndex({ comment_thread_id: 1, endorsed: 1 }, { background: true, sparse: true }) 4 | db.contents.ensureIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 }, { background: true }) 5 | 6 | db.contents.dropIndex({ sk: 1 }) // the new one (created above) supersedes this 7 | 
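// NOTE: dropping { sk: 1 } is presumably safe because sk is only ever queried
// together with comment_thread_id, which the compound index created above can serve.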
-------------------------------------------------------------------------------- /scripts/db/migrate-003-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.users.dropIndex({ external_id: 1 }) // drop the non-unique one 2 | db.users.ensureIndex({ external_id: 1 }, { unique: true, background: true }) 3 | db.subscriptions.ensureIndex({ source_id: 1, source_type: 1 }, { background: true }) 4 | -------------------------------------------------------------------------------- /scripts/db/migrate-004-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.ensureIndex({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}, {background: true}) 2 | db.contents.ensureIndex({_type: 1, course_id: 1, pinned: -1, "votes.point": -1, created_at: -1}, {background: true}) 3 | -------------------------------------------------------------------------------- /scripts/db/migrate-005-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.ensureIndex({commentable_id: 1}, {sparse: true, background: true}) 2 | -------------------------------------------------------------------------------- /scripts/db/migrate-006-remove-email.js: -------------------------------------------------------------------------------- 1 | db.users.dropIndex({email: 1}) 2 | -------------------------------------------------------------------------------- /scripts/db/migrate-007-thread-type.js: -------------------------------------------------------------------------------- 1 | print ("Adding thread_type to all comment threads where it does not yet exist\n"); 2 | db.contents.update( 3 | {_type: "CommentThread", thread_type: {$exists: false}}, 4 | {$set: {thread_type: "discussion"}}, 5 | {multi: true} 6 | ); 7 | printjson (db.runCommand({ getLastError: 1, w: "majority", wtimeout: 5000 } )); 8 | -------------------------------------------------------------------------------- /scripts/db/migrate-008-context.js: -------------------------------------------------------------------------------- 1 | print ("Add the new indexes for the context field"); 2 | db.contents.ensureIndex({ _type: 1, course_id: 1, context: 1, pinned: -1, created_at: -1 }, {background: true}) 3 | db.contents.ensureIndex({ _type: 1, commentable_id: 1, context: 1, pinned: -1, created_at: -1 }, {background: true}) 4 | 5 | print ("Adding context to all comment threads where it does not yet exist\n"); 6 | var bulk = db.contents.initializeUnorderedBulkOp(); 7 | bulk.find( {_type: "CommentThread", context: {$exists: false}} ).update( {$set: {context: "course"}} ); 8 | bulk.execute(); 9 | printjson (db.runCommand({ getLastError: 1, w: "majority", wtimeout: 5000 } )); 10 | -------------------------------------------------------------------------------- /scripts/db/migrate-009-comment_thread-author-created_at-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.ensureIndex({ comment_thread_id: 1, author_id: 1, created_at: 1 }, { background: true }) 2 | -------------------------------------------------------------------------------- /scripts/db/migrate-010-user-course_stats-course_id-index.js: -------------------------------------------------------------------------------- 1 | db.users.ensureIndex({'course_stats.course_id': 1}, { background: true }) 2 | -------------------------------------------------------------------------------- 
/scripts/db/revert-migrate-001-sk-author_username.js: -------------------------------------------------------------------------------- 1 | print ("removing fields 'sk' and 'author_username' from contents collection..."); 2 | db.contents.update({}, {$unset:{"sk":1, "author_username":1}}, { multi: true }); 3 | print ("removing index on contents.sk"); 4 | db.contents.dropIndex({"sk":1}); 5 | print ("all done!"); 6 | 7 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-002-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.ensureIndex({ sk: 1 }, { background: true, safe: true }) 2 | 3 | db.contents.dropIndex({ comment_thread_id: 1, updated_at: 1 }) 4 | db.contents.dropIndex({ comment_thread_id: 1, sk: 1 }) 5 | db.contents.dropIndex({ comment_thread_id: 1, endorsed: 1 }) 6 | db.contents.dropIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 }) 7 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-003-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.users.dropIndex({ external_id: 1 }) // drop the unique one 2 | db.users.ensureIndex({ external_id: 1 }, { background: true }) 3 | db.subscriptions.dropIndex({ source_id: 1, source_type: 1 }) 4 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-004-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.dropIndex({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}) 2 | db.contents.dropIndex({_type: 1, course_id: 1, pinned: -1, "votes.point": -1, created_at: -1}) 3 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-005-update-indexes.js: -------------------------------------------------------------------------------- 1 | db.contents.dropIndex({commentable_id: 1}) 2 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-006-remove-email.js: -------------------------------------------------------------------------------- 1 | db.users.ensureIndex({email: 1}, {background: true}) 2 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-007-thread-type.js: -------------------------------------------------------------------------------- 1 | print ("Removing thread_type from all comment threads\n"); 2 | db.contents.update( 3 | {_type: "CommentThread"}, 4 | {$unset: {thread_type: ""}}, 5 | {multi: true} 6 | ); 7 | printjson (db.runCommand({ getLastError: 1, w: "majority", wtimeout: 5000 } )); 8 | -------------------------------------------------------------------------------- /scripts/db/revert-migrate-008-context.js: -------------------------------------------------------------------------------- 1 | print ("remove the indexes for the context field"); 2 | db.contents.dropIndex({ _type: 1, course_id: 1, context: 1, pinned: -1, created_at: -1 }) 3 | db.contents.dropIndex({ _type: 1, commentable_id: 1, context: 1, pinned: -1, created_at: -1 }) 4 | 5 | print ("Removing context from all comment threads\n"); 6 | db.contents.update( 7 | {_type: "CommentThread"}, 8 | {$unset: {context: ""}}, 9 | {multi: true} 10 | ); 11 | printjson (db.runCommand({ getLastError: 1, w: "majority", wtimeout: 5000 } )); 12 | 
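// As in the other migration scripts here, getLastError with w: "majority" blocks
// until the preceding multi-document update is acknowledged by a majority of the
// replica set (or the 5-second wtimeout expires) before printing the result.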
-------------------------------------------------------------------------------- /scripts/db/revert-migrate-010-user-course_stats-course_id-index.js: -------------------------------------------------------------------------------- 1 | db.users.dropIndex({'course_stats.course_id': 1}) 2 | -------------------------------------------------------------------------------- /scripts/es/copy-index.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 4 | # Thin wrapper around stream2es. 5 | # 6 | # https://github.com/elasticsearch/stream2es 7 | # 8 | # Copies an index from an elasticsearch source server to a target server. 9 | # The target server can be the same as the source. 10 | # 11 | # Example: 12 | # 13 | # ./copy-index.sh http://localhost:9200 source_index http://localhost:9200 target_index 14 | # 15 | 16 | SOURCE_SERVER=$1 17 | SOURCE_INDEX=$2 18 | TARGET_SERVER=$3 19 | TARGET_INDEX=$4 20 | 21 | WORKERS="6" 22 | 23 | stream2es es -w ${WORKERS} --source "${SOURCE_SERVER}/${SOURCE_INDEX}" --target "${TARGET_SERVER}/${TARGET_INDEX}" 24 | -------------------------------------------------------------------------------- /scripts/es/incremental-copy-index.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 4 | # Thin wrapper around stream2es. 5 | # 6 | # https://github.com/elasticsearch/stream2es 7 | # 8 | # Copies an index from an elasticsearch source server to a target server. 9 | # The target server can be the same as the source. 10 | # 11 | # Requires jq 12 | # 13 | # http://stedolan.github.io/jq/ 14 | # 15 | # Assumes that both stream2es and jq are on your PATH. 16 | # 17 | # Example: 18 | # 19 | # ./incremental-copy-index.sh http://localhost:9200 source_index http://localhost:9200 target_index 20 | # 21 | 22 | SOURCE_SERVER=$1 23 | SOURCE_INDEX=$2 24 | TARGET_SERVER=$3 25 | TARGET_INDEX=$4 26 | 27 | WORKERS="6" 28 | # 29 | # Statistical breakdown of date fields on the target index to determine 30 | # range limits for the subsequent query of the source index. 31 | STATS=$(curl -s -XPOST "${TARGET_SERVER}/${TARGET_INDEX}/_search" -d @query-max-date.json) 32 | 33 | # Returns a document containing 34 | # "facets": { 35 | # "created_at_stats": { 36 | # "_type": "statistical", 37 | # "count": 802103, 38 | # "total": 1108393376211744500, 39 | # "min": 1345745023000, 40 | # "max": 1399317877000, 41 | # "mean": 1381859158003.08, 42 | # "sum_of_squares": 1.5318040597253865e+30, 43 | # "variance": 200126588772432280000, 44 | # "std_deviation": 14146610504.726292 45 | # }, 46 | # "updated_at_stats": { 47 | # "_type": "statistical", 48 | # "count": 802103, 49 | # "total": 1108407292058564700, 50 | # "min": 1345745083000, 51 | # "max": 1399317877000, 52 | # "mean": 1381876507204.891, 53 | # "sum_of_squares": 1.5318424128841502e+30, 54 | # "variance": 199993733758660100000, 55 | # "std_deviation": 14141914076.908403 56 | # } 57 | # } 58 | 59 | # extract the max create and update time in millis since epoch 60 | MAX_CREATED_AT=$( echo $STATS | jq -r '.facets.created_at_stats.max' ) 61 | MAX_UPDATED_AT=$( echo $STATS | jq -r '.facets.updated_at_stats.max' ) 62 | 63 | # expand the lower bound of the query by a second, allowing for 64 | # latency between writes in the ruby application and replication 65 | # to elasticsearch. 
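# For example, a recorded max of 1399317877000 ms becomes 1399317876000 ms, so any
# document written during that final second is copied again; re-copying should be
# harmless, since documents keep their _id and are simply re-indexed in place.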
66 | MAX_CREATED_AT=$((MAX_CREATED_AT-1000)) 67 | MAX_UPDATED_AT=$((MAX_UPDATED_AT-1000)) 68 | 69 | echo "Updating the target indices with records added since ${MAX_CREATED_AT} or updated since ${MAX_UPDATED_AT}" 70 | 71 | # Finds records in the source that are newer than the latest 72 | # document in the target. 73 | QUERY=" 74 | { 75 | \"query\":{ 76 | \"filtered\":{ 77 | \"query\":{ 78 | \"match_all\":{ 79 | 80 | } 81 | }, 82 | \"filter\":{ 83 | \"or\":{ 84 | \"filters\":[ 85 | { 86 | \"range\":{ 87 | \"created_at\":{ 88 | \"from\":\"${MAX_CREATED_AT}\", 89 | \"to\":\"now\" 90 | } 91 | } 92 | }, 93 | { 94 | \"range\":{ 95 | \"updated_at\":{ 96 | \"from\":\"${MAX_UPDATED_AT}\", 97 | \"to\":\"now\" 98 | } 99 | } 100 | } 101 | ] 102 | } 103 | } 104 | } 105 | } 106 | } 107 | " 108 | 109 | echo $QUERY 110 | 111 | stream2es es -w ${WORKERS} --query "${QUERY}" --source "${SOURCE_SERVER}/${SOURCE_INDEX}" --target "${TARGET_SERVER}/${TARGET_INDEX}" 112 | -------------------------------------------------------------------------------- /scripts/es/migrate-index.sh: -------------------------------------------------------------------------------- 1 | # USAGE 2 | 3 | # 1). Use `bundle exec rake db:create_search_indexes` to generate 4 | # new indexes with the appropriate mappings as declared in application 5 | # model code. 6 | 7 | # 2). Run this script for each of the two indexes created in the above 8 | # step. For example, if db:create_search_indexes outputs the following: 9 | # 10 | # comment_threads_1234567890 11 | # comments_1234567890 12 | # 13 | # Then you need to run the following two commands (setting the 14 | # elasticsearch url as needed): 15 | # 16 | # ./migrate-index.sh http://my-es-host:9200 comment_threads comment_threads_1234567890 17 | # ./migrate-index.sh http://my-es-host:9200 comments comments_1234567890 18 | 19 | 20 | # HOW IT WORKS 21 | 22 | # ---T1---(W1)---T2---(W2)---T3---------> 23 | # \ \ \ 24 | # \ \ incremental-copy-index completes 25 | # \ \ 26 | # \ copy-index completes 27 | # \ alias moves 28 | # \ incremental-copy-index begins 29 | # \ 30 | # copy-index begins 31 | # 32 | 33 | # During W1, the new index is created but is not yet live. The current 34 | # index's documents are copied into the new index, as of T1 (see 35 | # copy-index.sh). 36 | 37 | # At T2, when this initial copy finishes, the new index is made live and 38 | # requests to the application begin using it (though it is still missing 39 | # changes from W1). 40 | 41 | # During W2, all documents in the old (previously live) index that were 42 | # created or modified during W1 are copied into the newly-live index 43 | # (see incremental-copy-index.sh). 44 | 45 | # At T3, when the second copy finishes, the new (newly-live) index is up 46 | # to date and the old index can be discarded. 47 | 48 | 49 | # WARNING 50 | 51 | # When performed while the application is online, the migration process 52 | # is prone to a race condition, whose likelihood increases with the 53 | # amount of write traffic against the application. 54 | 55 | # Specifically, if a document is created or modified during W1, and the 56 | # same document is modified (or deleted) during W2, the two 57 | # modifications *may* be applied out of sequence, resulting in either a 58 | # lost update or a reverted deletion. 59 | 60 | # Therefore, manual steps must be taken to ensure the index and the 61 | # source database are fully synchronized, if these tools are used for an 62 | # online index migration. 
Otherwise, consider closing your application 63 | # instance for maintenance while migrating your indexes. 64 | 65 | 66 | ES_URL=$1 67 | ALIAS=$2 68 | NEW_INDEX=$3 69 | 70 | # determine the existing alias and set OLD_INDEX 71 | OLD_INDEX=`curl -X GET $ES_URL/_alias/$ALIAS | jq -r 'keys[0]'` 72 | 73 | echo old index: $OLD_INDEX 74 | echo new index: $NEW_INDEX 75 | 76 | if [ $OLD_INDEX = $NEW_INDEX ]; then 77 | echo "Alias ${ALIAS} already points to the new index. Nothing to do." 78 | exit 0 79 | fi 80 | 81 | # regenerate presently existing documents in new index 82 | ./copy-index.sh $ES_URL $OLD_INDEX $ES_URL $NEW_INDEX 83 | 84 | # move alias atomically to new index 85 | BODY=" 86 | { 87 | \"actions\" : [ 88 | {\"remove\": {\"index\" : \"${OLD_INDEX}\", \"alias\" : \"${ALIAS}\" } }, 89 | {\"add\": {\"index\" : \"${NEW_INDEX}\", \"alias\" : \"${ALIAS}\" } } 90 | ] 91 | } 92 | " 93 | curl -X POST "${ES_URL}/_aliases" -d "${BODY}" 94 | 95 | # pick up any missed updates since the first copy 96 | ./incremental-copy-index.sh $ES_URL $OLD_INDEX $ES_URL $NEW_INDEX 97 | -------------------------------------------------------------------------------- /scripts/es/query-max-date.json: -------------------------------------------------------------------------------- 1 | { 2 | "query" : { 3 | "match_all" : {} 4 | }, 5 | "facets" : { 6 | "created_at_stats" : { 7 | "statistical" : { 8 | "field" : "created_at" 9 | } 10 | }, 11 | "updated_at_stats": { 12 | "statistical": { 13 | "field": "updated_at" 14 | } 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /spec/api/abuse_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | 3 | describe 'Abuse API' do 4 | before(:each) { set_api_key_header } 5 | 6 | shared_examples 'an abuse endpoint' do 7 | let(:affected_entity_id) { affected_entity.id } 8 | let(:user_id) { create(:user).id } 9 | 10 | it { is_expected.to be_ok } 11 | 12 | it 'updates the abuse flaggers' do 13 | subject 14 | 15 | affected_entity.reload 16 | expect(affected_entity.abuse_flaggers).to eq expected_abuse_flaggers 17 | expect(non_affected_entity.abuse_flaggers).to have(0).items 18 | end 19 | 20 | context 'if the comment does not exist' do 21 | let(:affected_entity_id) { 'does_not_exist' } 22 | it { is_expected.to be_bad_request } 23 | its(:body) { should eq "[\"#{I18n.t(:requested_object_not_found)}\"]" } 24 | end 25 | 26 | context 'if no user_id is provided' do 27 | let(:user_id) { nil } 28 | it { is_expected.to be_bad_request } 29 | its(:body) { should eq "[\"#{I18n.t(:user_id_is_required)}\"]" } 30 | end 31 | end 32 | 33 | describe 'comment actions' do 34 | let(:affected_entity) { create(:comment, abuse_flaggers: []) } 35 | let(:non_affected_entity) { affected_entity.comment_thread } 36 | 37 | context 'when flagging a comment for abuse' do 38 | let(:expected_abuse_flaggers) { [user_id] } 39 | subject { put "/api/v1/comments/#{affected_entity_id}/abuse_flag", user_id: user_id } 40 | 41 | it_behaves_like 'an abuse endpoint' 42 | end 43 | 44 | context 'when un-flagging a comment for abuse' do 45 | let(:affected_entity) { create(:comment, abuse_flaggers: [user_id]) } 46 | let(:expected_abuse_flaggers) { [] } 47 | subject { put "/api/v1/comments/#{affected_entity_id}/abuse_unflag", user_id: user_id } 48 | 49 | it_behaves_like 'an abuse endpoint' 50 | end 51 | end 52 | 53 | describe 'comment thread actions' do 54 | let(:affected_entity) { create(:comment_thread, abuse_flaggers: 
[]) } 55 | let(:non_affected_entity) { create(:comment, comment_thread: affected_entity) } 56 | 57 | context 'when flagging a comment thread for abuse' do 58 | let(:expected_abuse_flaggers) { [user_id] } 59 | subject { put "/api/v1/threads/#{affected_entity_id}/abuse_flag", user_id: user_id } 60 | 61 | it_behaves_like 'an abuse endpoint' 62 | end 63 | 64 | context 'when un-flagging a comment thread for abuse' do 65 | let(:affected_entity) { create(:comment_thread, abuse_flaggers: [user_id]) } 66 | let(:expected_abuse_flaggers) { [] } 67 | subject { put "/api/v1/threads/#{affected_entity_id}/abuse_unflag", user_id: user_id } 68 | 69 | it_behaves_like 'an abuse endpoint' 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /spec/api/i18n_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | 3 | describe 'i18n' do 4 | 5 | before(:each) { set_api_key_header } 6 | 7 | it 'should respect the Accept-Language header' do 8 | put '/api/v1/comments/does_not_exist/votes', {}, {'HTTP_ACCEPT_LANGUAGE' => 'x-test'} 9 | expect(last_response.status).to eq(400) 10 | expect(parse(last_response.body).first).to eq('##x-test## requested object not found') 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /spec/api/notifications_and_subscriptions_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | 3 | describe "app" do 4 | describe "notifications and subscriptions" do 5 | 6 | let(:subscriber) { create_test_user(42) } 7 | 8 | before(:each) do 9 | set_api_key_header 10 | setup_10_threads 11 | %w[t9 t7 t5 t3 t1].each { |t| subscriber.subscribe(@threads[t]) } 12 | end 13 | 14 | describe "GET /api/v1/users/:user_id/subscribed_threads" do 15 | 16 | def thread_result(params) 17 | get "/api/v1/users/#{subscriber.id}/subscribed_threads", params 18 | expect(last_response).to be_ok 19 | parse(last_response.body)["collection"] 20 | end 21 | 22 | context "when filtering flagged posts" do 23 | it "returns threads that are flagged" do 24 | @threads["t1"].abuse_flaggers = [1] 25 | @threads["t1"].save! 26 | rs = thread_result course_id: DFLT_COURSE_ID, flagged: true 27 | expect(rs.length).to eq(1) 28 | check_thread_result_json(nil, @threads["t1"], rs.first) 29 | end 30 | it "returns threads that have flagged comments" do 31 | @comments["t2 c3"].abuse_flaggers = [1] # note: not subscribed 32 | @comments["t2 c3"].save! 33 | @comments["t3 c3"].abuse_flaggers = [1] # subscribed 34 | @comments["t3 c3"].save! 35 | rs = thread_result course_id: DFLT_COURSE_ID, flagged: true 36 | expect(rs.length).to eq(1) 37 | check_thread_result_json(nil, @threads["t3"], rs.first) 38 | end 39 | it "returns an empty result when no posts were flagged" do 40 | rs = thread_result course_id: DFLT_COURSE_ID, flagged: true 41 | expect(rs.length).to eq(0) 42 | end 43 | end 44 | it "filters by group_id" do 45 | rs = thread_result course_id: DFLT_COURSE_ID, group_id: 42 46 | expect(rs.length).to eq(5) 47 | @threads["t3"].group_id = 43 48 | @threads["t3"].save! 49 | rs = thread_result course_id: DFLT_COURSE_ID, group_id: 42 50 | expect(rs.length).to eq(4) 51 | @threads["t3"].group_id = 42 52 | @threads["t3"].save! 
53 | rs = thread_result course_id: DFLT_COURSE_ID, group_id: 42 54 | expect(rs.length).to eq(5) 55 | end 56 | it "filters by group_ids" do 57 | rs = thread_result course_id: DFLT_COURSE_ID, group_ids: "42" 58 | expect(rs.length).to eq(5) 59 | @threads["t3"].group_id = 43 60 | @threads["t3"].save! 61 | rs = thread_result course_id: DFLT_COURSE_ID, group_ids: "42" 62 | expect(rs.length).to eq(4) 63 | rs = thread_result course_id: DFLT_COURSE_ID, group_ids: "42,43" 64 | expect(rs.length).to eq(5) 65 | end 66 | it "filters unread posts" do 67 | rs = thread_result course_id: DFLT_COURSE_ID 68 | expect(rs.length).to eq(5) 69 | rs2 = thread_result course_id: DFLT_COURSE_ID, unread: true 70 | expect(rs2).to eq(rs) 71 | subscriber.mark_as_read(@threads[rs.first["title"]]) 72 | rs3 = thread_result course_id: DFLT_COURSE_ID, unread: true 73 | expect(rs3).to eq(rs[1..4]) 74 | rs[1..3].each { |r| subscriber.mark_as_read(@threads[r["title"]]) } 75 | rs4 = thread_result course_id: DFLT_COURSE_ID, unread: true 76 | expect(rs4).to eq(rs[4, 1]) 77 | subscriber.mark_as_read(@threads[rs.last["title"]]) 78 | rs5 = thread_result course_id: DFLT_COURSE_ID, unread: true 79 | expect(rs5).to eq([]) 80 | make_comment(create_test_user(Random.new), @threads[rs.first["title"]], "new activity") 81 | rs6 = thread_result course_id: DFLT_COURSE_ID, unread: true 82 | expect(rs6.length).to eq(1) 83 | expect(rs6.first["title"]).to eq(rs.first["title"]) 84 | end 85 | it "filters unanswered questions" do 86 | %w[t9 t7].each do |thread_key| 87 | @threads[thread_key].thread_type = :question 88 | @threads[thread_key].save! 89 | end 90 | rs = thread_result course_id: DFLT_COURSE_ID, unanswered: true 91 | expect(rs.length).to eq(2) 92 | @comments["t7 c0"].endorsed = true 93 | @comments["t7 c0"].save! 94 | rs2 = thread_result course_id: DFLT_COURSE_ID, unanswered: true 95 | expect(rs2.length).to eq(1) 96 | @comments["t9 c0"].endorsed = true 97 | @comments["t9 c0"].save! 98 | rs3 = thread_result course_id: DFLT_COURSE_ID, unanswered: true 99 | expect(rs3.length).to eq(0) 100 | end 101 | it "ignores endorsed comments that are not question responses" do 102 | thread = @threads["t1"] 103 | thread.thread_type = :question 104 | thread.save! 105 | rs = thread_result course_id: DFLT_COURSE_ID, unanswered: true 106 | expect(rs.length).to eq(1) 107 | comment = make_comment(create_test_user(Random.new), thread.comments.first, "comment on a response") 108 | comment.endorsed = true 109 | comment.save! 
110 | rs2 = thread_result course_id: DFLT_COURSE_ID, unanswered: true 111 | expect(rs2.length).to eq(1) 112 | end 113 | end 114 | 115 | describe "POST /api/v1/users/:user_id/subscriptions" do 116 | it "subscribe a comment thread" do 117 | thread = @threads["t0"] 118 | post "/api/v1/users/#{subscriber.external_id}/subscriptions", source_type: "thread", source_id: thread.id 119 | expect(last_response).to be_ok 120 | expect(thread.subscribers.length).to eq(1) 121 | expect(thread.subscribers[0]).to eq(subscriber) 122 | end 123 | end 124 | 125 | describe "DELETE /api/v1/users/:user_id/subscriptions" do 126 | it "unsubscribe a comment thread" do 127 | thread = @threads["t2"] 128 | subscriber.subscribe(thread) 129 | expect(thread.subscribers.length).to eq(1) 130 | expect(thread.subscribers[0]).to eq(subscriber) 131 | delete "/api/v1/users/#{subscriber.external_id}/subscriptions", source_type: "thread", source_id: thread.id 132 | expect(last_response).to be_ok 133 | expect(thread.subscribers.length).to eq(0) 134 | end 135 | end 136 | describe "GET /api/v1/threads/:thread_id/subscriptions" do 137 | it "Get subscribers of thread" do 138 | thread = @threads["t2"] 139 | subscriber.subscribe(thread) 140 | expect(thread.subscribers.length).to eq(1) 141 | 142 | get "/api/v1/threads/#{thread.id}/subscriptions", { 'page': 1 } 143 | expect(last_response).to be_ok 144 | response = parse(last_response.body) 145 | expect(response['collection'].length).to eq(1) 146 | expect(response['num_pages']).to eq(1) 147 | expect(response['page']).to eq(1) 148 | expect(response['subscriptions_count']).to eq(1) 149 | puts last_response.body 150 | 151 | end 152 | end 153 | 154 | describe "GET /api/v1/threads/:thread_id/subscriptions" do 155 | it "Get subscribers of thread with pagination" do 156 | thread = @threads["t2"] 157 | 158 | subscriber.subscribe(thread) 159 | create_test_user(43).subscribe(thread) 160 | create_test_user(44).subscribe(thread) 161 | create_test_user(45).subscribe(thread) 162 | create_test_user(46).subscribe(thread) 163 | create_test_user(47).subscribe(thread) 164 | 165 | expect(thread.subscribers.length).to eq(6) 166 | 167 | get "/api/v1/threads/#{thread.id}/subscriptions", { 'page': 1, 'per_page': 2 } 168 | expect(last_response).to be_ok 169 | response = parse(last_response.body) 170 | expect(response['collection'].length).to eq(2) 171 | expect(response['num_pages']).to eq(3) 172 | expect(response['page']).to eq(1) 173 | expect(response['subscriptions_count']).to eq(6) 174 | 175 | get "/api/v1/threads/#{thread.id}/subscriptions", { 'page': 2, 'per_page': 2 } 176 | expect(last_response).to be_ok 177 | response = parse(last_response.body) 178 | expect(response['collection'].length).to eq(2) 179 | expect(response['num_pages']).to eq(3) 180 | expect(response['page']).to eq(2) 181 | expect(response['subscriptions_count']).to eq(6) 182 | end 183 | end 184 | 185 | end 186 | end 187 | -------------------------------------------------------------------------------- /spec/api/notifications_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | 3 | describe "app" do 4 | describe "notifications" do 5 | 6 | before(:each) do 7 | init_without_subscriptions 8 | set_api_key_header 9 | end 10 | 11 | def create_thread(user, options = {}) 12 | # Create a CommentThread with the given user. 13 | # Can optionally specify a cohort group_id via options. 14 | # Returns the created CommentThread. 
15 | 
16 |       commentable = Commentable.new("question_1")
17 |       random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
18 |       thread = CommentThread.new(
19 |         title: "Test title", body: random_string, course_id: "1",
20 |         commentable_id: commentable.id
21 |       )
22 |       thread.thread_type = :discussion
23 |       thread.author = user
24 |       if options[:group_id]
25 |         thread.group_id = options[:group_id]
26 |       end
27 |       thread.save!
28 | 
29 |       return thread
30 |     end
31 | 
32 |     def get_thread_notification(comment_body, options = {})
33 |       # Creates a thread and comment with the specified comment_body.
34 |       # Can optionally specify a cohort group_id via options.
35 |       # Calls the notifications API to retrieve the notification for the thread
36 |       # and returns the response hash for the single comment thread within the course.
37 |       # Keys for the returned hash: content, title, commentable_id, group_id (only present if cohorted).
38 | 
39 |       start_time = Time.now
40 |       user = create_test_user(Random.new)
41 |       thread = create_thread(user, options)
42 | 
43 |       subscription = Subscription.create(:subscriber_id => user._id.to_s, :source_id => thread._id.to_s)
44 | 
45 |       comment = Comment.new
46 |       comment.comment_thread_id = thread.id
47 |       comment.body = comment_body
48 |       comment.author_id = user.id
49 |       comment.course_id = 'test course'
50 |       comment.save!
51 | 
52 |       sleep 1
53 | 
54 |       end_time = Time.now
55 | 
56 |       post(
57 |         "/api/v1/notifications",
58 |         {
59 |           from: CGI::escape(start_time.to_s),
60 |           to: CGI::escape(end_time.to_s),
61 |           user_ids: subscription.subscriber_id
62 |         }
63 |       )
64 | 
65 |       expect(last_response).to be_ok
66 |       response_hash = JSON.parse(last_response.body)
67 |       return response_hash[user.id][comment.course_id][thread.id.to_s]
68 |     end
69 | 
70 |     describe "POST /api/v1/notifications" do
71 |       it "returns notifications by class and user" do
72 |         expected_comment_body = (0..5).map { ('a'..'z').to_a[rand(26)] }.join
73 |         thread_notification = get_thread_notification(expected_comment_body)
74 |         actual_comment_body = thread_notification["content"][0]["body"]
75 |         expect(actual_comment_body).to eq(expected_comment_body)
76 |       end
77 | 
78 |       it "contains cohort group_id if defined" do
79 |         thread_notification = get_thread_notification("dummy comment content", :group_id => 1974)
80 |         expect(thread_notification["group_id"]).to be(1974)
81 |       end
82 | 
83 |       it "does not contain cohort group_id if not defined" do
84 |         thread_notification = get_thread_notification("dummy comment content")
85 |         expect(thread_notification.has_key?("group_id")).to be false
86 |       end
87 | 
88 |       it "returns only threads subscribed to by user" do
89 | 
90 |         # first make a dummy thread and comment and a subscription
91 |         commentable = Commentable.new("question_1")
92 |         user = create_test_user(Random.new)
93 |         thread = create_thread(user)
94 | 
95 |         subscription = Subscription.create({ :subscriber_id => user._id.to_s, :source_id => thread._id.to_s })
96 | 
97 |         comment = Comment.new(body: "dummy body text", course_id: "1")
98 |         comment.commentable_id = commentable.id
99 |         comment.author = user
100 |         comment.comment_thread = thread
101 |         comment.save!
102 |
103 |         start_time = Date.today - 100.days
104 |
105 |         sleep 1
106 |
107 |         end_time = Time.now + 5.seconds
108 |
109 |         post "/api/v1/notifications", from: CGI::escape(start_time.to_s), to: CGI::escape(end_time.to_s), user_ids: user.id
110 |
111 |         expect(last_response).to be_ok
112 |         payload = JSON.parse last_response.body
113 |         courses = payload[user.id.to_s]
114 |         thread_ids = []
115 |         courses.each do |k, v|
116 |           v.each do |kk, vv|
117 |             thread_ids << kk
118 |           end
119 |         end
120 |         # now make sure the threads are a subset of the user's subscriptions
121 |         subscriptions = Subscription.where(:subscriber_id => user.id.to_s)
122 |         subscribed_thread_ids = subscriptions.collect { |s| s.source_id }
123 |
124 |         expect(subscribed_thread_ids.to_set.superset? thread_ids.to_set).to eq(true)
125 |
126 |       end
127 |
128 |       it "returns only unflagged threads" do
129 |         start_time = Date.today - 100.days
130 |
131 |
132 |         user = User.create(:external_id => 1, :username => "example")
133 |
134 |         sleep 1
135 |
136 |         end_time = Time.now + 5.seconds
137 |
138 |         post "/api/v1/notifications", from: CGI::escape(start_time.to_s), to: CGI::escape(end_time.to_s), user_ids: user.id
139 |
140 |         expect(last_response).to be_ok
141 |         payload = JSON.parse last_response.body
142 |         courses = payload[user.id.to_s]
143 |         thread_ids = []
144 |         courses.each do |k, v|
145 |           v.each do |kk, vv|
146 |             thread_ids << kk
147 |           end
148 |         end
149 |         # now flag the first thread and persist it so the API can see the flag
150 |         thread = CommentThread.find thread_ids.first
151 |         thread.historical_abuse_flaggers << "1"
152 |         thread.save!
153 |         sleep 1
154 |
155 |         end_time = Time.now + 5.seconds
156 |
157 |         post "/api/v1/notifications", from: CGI::escape(start_time.to_s), to: CGI::escape(end_time.to_s), user_ids: user.id
158 |         expect(last_response).to be_ok
159 |         payload = JSON.parse last_response.body
160 |         courses = payload[user.id.to_s]
161 |         new_thread_ids = []
162 |         courses.each do |k, v|
163 |           v.each do |kk, vv|
164 |             new_thread_ids << kk
165 |           end
166 |         end
167 |
168 |         expect(new_thread_ids.include? thread.id.to_s).to eq(false)
169 |
170 |       end
171 |
172 |     end
173 |   end
174 | end
175 |
--------------------------------------------------------------------------------
/spec/api/query_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'faker'
3 |
4 |
5 | describe 'app' do
6 |   include_context 'search_enabled'
7 |   before(:each) { set_api_key_header }
8 |   let(:body) { Faker::Lorem.word }
9 |
10 |   describe 'GET /api/v1/search/threads' do
11 |
12 |     shared_examples_for 'a search endpoint' do
13 |       subject do
14 |         TaskHelpers::ElasticsearchHelper.refresh_indices
15 |         get '/api/v1/search/threads', text: body
16 |       end
17 |
18 |       let(:matched_thread) { parse(subject.body)['collection'].select { |t| t['id'] == thread.id.to_s }.first }
19 |
20 |       it { is_expected.to be_ok }
21 |
22 |       it 'returns thread with query match' do
23 |         expect(matched_thread).to_not be_nil
24 |         check_thread_result_json(nil, thread, matched_thread)
25 |       end
26 |     end
27 |
28 |     context 'when searching on thread content' do
29 |       let!(:thread) { create(:comment_thread, body: body) }
30 |
31 |       it_behaves_like 'a search endpoint'
32 |     end
33 |
34 |     context 'when searching on comment content' do
35 |       let!(:thread) do
36 |         comment = create(:comment, body: body)
37 |         comment.comment_thread
38 |       end
39 |
40 |       it_behaves_like 'a search endpoint'
41 |     end
42 |   end
43 | end
44 |
--------------------------------------------------------------------------------
/spec/api/vote_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe "app" do
4 |   describe "votes" do
5 |
6 |     before(:each) do
7 |       init_without_subscriptions
8 |       set_api_key_header
9 |     end
10 |
11 |     describe "PUT /api/v1/comments/:comment_id/votes" do
12 |       it "creates or updates the vote on the comment" do
13 |         user = User.first
14 |         comment = Comment.first
15 |         prev_up_votes = comment.up_votes_count
16 |         prev_down_votes = comment.down_votes_count
17 |         put "/api/v1/comments/#{comment.id}/votes", user_id: user.id, value: "down"
18 |         comment = Comment.find(comment.id)
19 |         expect(comment.up_votes_count).to eq(prev_up_votes - 1)
20 |         expect(comment.down_votes_count).to eq(prev_down_votes + 1)
21 |       end
22 |       it "returns 400 when the comment does not exist" do
23 |         put "/api/v1/comments/does_not_exist/votes", user_id: User.first.id, value: "down"
24 |         expect(last_response.status).to eq(400)
25 |         expect(parse(last_response.body).first).to eq(I18n.t(:requested_object_not_found))
26 |       end
27 |       it "returns 400 when user_id is not provided" do
28 |         put "/api/v1/comments/#{Comment.first.id}/votes", value: "down"
29 |         expect(last_response.status).to eq(400)
30 |         expect(parse(last_response.body).first).to eq(I18n.t(:user_id_is_required))
31 |       end
32 |       it "returns 400 when value is not provided or invalid" do
33 |         put "/api/v1/comments/#{Comment.first.id}/votes", user_id: User.first.id
34 |         expect(last_response.status).to eq(400)
35 |         expect(parse(last_response.body).first).to eq(I18n.t(:value_is_required))
36 |         put "/api/v1/comments/#{Comment.first.id}/votes", user_id: User.first.id, value: "superdown"
37 |         expect(last_response.status).to eq(400)
38 |         expect(parse(last_response.body).first).to eq(I18n.t(:value_is_invalid))
39 |       end
40 |     end
41 |     describe "DELETE /api/v1/comments/:comment_id/votes" do
42 |       it "removes the vote from the comment" do
43 |         user = User.first
44 |         comment = Comment.first
45 |         prev_up_votes = comment.up_votes_count
46 |         prev_down_votes = comment.down_votes_count
47 |         delete "/api/v1/comments/#{comment.id}/votes", user_id: user.id
48 |         comment = Comment.find(comment.id)
49 |         expect(comment.up_votes_count).to eq(prev_up_votes - 1)
50 |         expect(comment.down_votes_count).to eq(prev_down_votes)
51 |       end
52 |       it "removing the vote from the comment is idempotent" do
53 |         user = User.first
54 |         comment = Comment.first
55 |         prev_up_votes = comment.up_votes_count
56 |         prev_down_votes = comment.down_votes_count
57 |         delete "/api/v1/comments/#{comment.id}/votes", user_id: user.id
58 |         # multiple calls to the unvote endpoint should not change the data
59 |         delete "/api/v1/comments/#{comment.id}/votes", user_id: user.id
60 |         comment = Comment.find(comment.id)
61 |         expect(comment.up_votes_count).to eq(prev_up_votes - 1)
62 |         expect(comment.down_votes_count).to eq(prev_down_votes)
63 |       end
64 |       it "returns 400 when the comment does not exist" do
65 |         delete "/api/v1/comments/does_not_exist/votes", user_id: User.first.id
66 |         expect(last_response.status).to eq(400)
67 |         expect(parse(last_response.body).first).to eq(I18n.t(:requested_object_not_found))
68 |       end
69 |       it "returns 400 when user_id is not provided" do
70 |         delete "/api/v1/comments/#{Comment.first.id}/votes"
71 |         expect(last_response.status).to eq(400)
72 |         expect(parse(last_response.body).first).to eq(I18n.t(:user_id_is_required))
73 |       end
74 |     end
75 |     describe "PUT /api/v1/threads/:thread_id/votes" do
76 |       it "creates or updates the vote on the thread" do
77 |         user = User.first
78 |         thread = CommentThread.first
79 |         prev_up_votes = thread.up_votes_count
80 |         prev_down_votes = thread.down_votes_count
81 |         put "/api/v1/threads/#{thread.id}/votes", user_id: user.id, value: "down"
82 |         thread = CommentThread.find(thread.id)
83 |         expect(thread.up_votes_count).to eq(prev_up_votes - 1)
84 |         expect(thread.down_votes_count).to eq(prev_down_votes + 1)
85 |       end
86 |       it "voting on the thread is idempotent" do
87 |         user = User.first
88 |         thread = CommentThread.first
89 |         prev_up_votes = thread.up_votes_count
90 |         prev_down_votes = thread.down_votes_count
91 |         put "/api/v1/threads/#{thread.id}/votes", user_id: user.id, value: "down"
92 |         put "/api/v1/threads/#{thread.id}/votes", user_id: user.id, value: "down"
93 |         thread = CommentThread.find(thread.id)
94 |         expect(thread.up_votes_count).to eq(prev_up_votes - 1)
95 |         expect(thread.down_votes_count).to eq(prev_down_votes + 1)
96 |       end
97 |       it "returns 400 when the thread does not exist" do
98 |         put "/api/v1/threads/does_not_exist/votes", user_id: User.first.id, value: "down"
99 |         expect(last_response.status).to eq(400)
100 |         expect(parse(last_response.body).first).to eq(I18n.t(:requested_object_not_found))
101 |       end
102 |       it "returns 400 when user_id is not provided" do
103 |         put "/api/v1/threads/#{CommentThread.first.id}/votes", value: "down"
104 |         expect(last_response.status).to eq(400)
105 |         expect(parse(last_response.body).first).to eq(I18n.t(:user_id_is_required))
106 |       end
107 |       it "returns 400 when value is not provided or invalid" do
108 |         put "/api/v1/threads/#{CommentThread.first.id}/votes", user_id: User.first.id
109 |         expect(last_response.status).to eq(400)
110 |         expect(parse(last_response.body).first).to eq(I18n.t(:value_is_required))
111 |         put "/api/v1/threads/#{CommentThread.first.id}/votes", user_id: User.first.id, value: "superdown"
112 |         expect(last_response.status).to eq(400)
113 |         expect(parse(last_response.body).first).to eq(I18n.t(:value_is_invalid))
114 |       end
115 |     end
116 |     describe "DELETE /api/v1/threads/:thread_id/votes" do
117 |       it "removes the vote from the thread" do
118 |         user = User.first
119 |         thread = CommentThread.first
120 |         prev_up_votes = thread.up_votes_count
121 |         prev_down_votes = thread.down_votes_count
122 |         delete "/api/v1/threads/#{thread.id}/votes", user_id: user.id
123 |         thread = CommentThread.find(thread.id)
124 |         expect(thread.up_votes_count).to eq(prev_up_votes - 1)
125 |         expect(thread.down_votes_count).to eq(prev_down_votes)
126 |       end
127 |       it "returns 400 when the thread does not exist" do
128 |         delete "/api/v1/threads/does_not_exist/votes", user_id: User.first.id
129 |         expect(last_response.status).to eq(400)
130 |         expect(parse(last_response.body).first).to eq(I18n.t(:requested_object_not_found))
131 |       end
132 |       it "returns 400 when user_id is not provided" do
133 |         delete "/api/v1/threads/#{CommentThread.first.id}/votes"
134 |         expect(last_response.status).to eq(400)
135 |         expect(parse(last_response.body).first).to eq(I18n.t(:user_id_is_required))
136 |       end
137 |     end
138 |   end
139 | end
140 |
--------------------------------------------------------------------------------
/spec/app_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe 'app' do
4 |   describe 'access control' do
5 |     let(:user) { create_test_user(42) }
6 |     # all routes (even nonexistent ones) are covered by the api key
7 |     # /heartbeat is the only exception, covered in the heartbeat tests below
8 |     let(:urls) do
9 |       {
10 |         "/" => 404,
11 |         "/api/v1/users/#{user.id}" => 200,
12 |         "/api/v1/users/doesnotexist" => 404,
13 |         "/selftest" => 200
14 |       }
15 |     end
16 |
17 |     it 'returns 401 when api key header is not set' do
18 |       urls.keys.each do |url|
19 |         get url
20 |         expect(last_response.status).to eq 401
21 |       end
22 |     end
23 |
24 |     it 'returns 401 when api key value is incorrect' do
25 |       urls.keys.each do |url|
26 |         get url, {}, {'HTTP_X_EDX_API_KEY' => "incorrect-#{TEST_API_KEY}"}
27 |         expect(last_response.status).to eq 401
28 |       end
29 |     end
30 |
31 |     it 'allows requests when api key value is correct' do
32 |       urls.each do |url, status|
33 |         get url, {}, {'HTTP_X_EDX_API_KEY' => TEST_API_KEY}
34 |         expect(last_response.status).to eq status
35 |       end
36 |     end
37 |   end
38 |
39 |   describe 'heartbeat monitoring' do
40 |     subject do
41 |       get '/heartbeat'
42 |       last_response
43 |     end
44 |
45 |     it 'does not require api key' do
46 |       expect(subject.status).to eq 200
47 |     end
48 |
49 |     context 'db check' do
50 |       def test_db_check(response, is_success)
51 |         db = double("db")
52 |         allow(stub_const('Mongoid::Clients', Class.new)).to receive(:default).and_return(db)
53 |         result = double('result')
54 |         allow(result).to receive(:ok?).and_return(response['ok'] == 1)
55 |         allow(result).to receive(:documents).and_return([response])
56 |         allow(db).to receive(:close).and_return(true)
57 |         allow(db).to receive(:reconnect).and_return(true)
58 |         allow(db).to receive(:options).and_return({read: {mode: :primary}})
59 |         # should be checked twice, because it will retry
60 |         expect(db).to receive(:command).with({:isMaster => 1}).twice.and_return(result)
61 |
62 |         body = parse(subject.body)
63 |         if is_success
64 |           expect(subject.status).to eq 200
65 |           expect(body).to eq({'OK' => true})
66 |         else
67 |           expect(subject.status).to eq 500
68 |           expect(body).to eq({'OK' => false, 'check' => 'db'})
69 |         end
70 |       end
71 |
72 |       it 'reports success when mongo is ready' do
73 |         test_db_check({'ismaster' => true, 'ok' => 1}, true)
74 |       end
75 |
76 |       it 'reports failure when mongo is not master' do
77 |         test_db_check({'ismaster' => false, 'ok' => 1}, false)
78 |       end
79 |
80 |       it 'reports failure when mongo is not OK' do
81 |         test_db_check({'ismaster' => true, 'ok' => 0}, false)
82 |       end
83 |
84 |       it 'reports failure when command response is unexpected' do
85 |         test_db_check({'foo' => 'bar'}, false)
86 |       end
87 |
88 |       it 'reports failure when db command raises an error' do
89 |         db = double('db')
90 |         allow(stub_const('Mongoid::Clients', Class.new)).to receive(:default).and_return(db)
91 |         allow(db).to receive(:close).and_return(true)
92 |         allow(db).to receive(:reconnect).and_return(true)
93 |         # should be checked twice, because it will retry
94 |         expect(db).to receive(:command).with({:isMaster => 1}).twice.and_raise(StandardError)
95 |
96 |         expect(subject.status).to eq 500
97 |         expect(parse(subject.body)).to eq({'OK' => false, 'check' => 'db'})
98 |       end
99 |     end
100 |
101 |     context 'elasticsearch check' do
102 |       after(:each) { WebMock.reset! }
103 |
104 |       def test_es_check(service_available, status='green', timed_out=false)
105 |         body = {
106 |           status: status,
107 |           timed_out: timed_out,
108 |         }
109 |         url = "#{CommentService.config[:elasticsearch_server]}/_cluster/health"
110 |         stub = stub_request(:any, url).to_return(body: body.to_json, headers: {'Content-Type' => 'application/json'})
111 |
112 |         body = parse(subject.body)
113 |         expect(stub).to have_been_requested
114 |
115 |         if service_available
116 |           expect(last_response.status).to eq 200
117 |           expect(body).to eq({'OK' => true})
118 |         else
119 |           expect(last_response.status).to eq 500
120 |           expect(body).to eq({'OK' => false, 'check' => 'es'})
121 |         end
122 |       end
123 |
124 |       it 'reports success if cluster status is green' do
125 |         test_es_check(true, 'green')
126 |       end
127 |
128 |       it 'reports success if cluster status is yellow' do
129 |         test_es_check(true, 'yellow')
130 |       end
131 |
132 |       it 'reports failure if cluster status is red' do
133 |         test_es_check(false, 'red')
134 |       end
135 |
136 |       it 'reports failure if cluster status is unexpected' do
137 |         test_es_check(false, 'unexpected')
138 |       end
139 |
140 |       it 'reports failure if the cluster health check times out' do
141 |         test_es_check(false, 'green', true)
142 |       end
143 |     end
144 |   end
145 |
146 |   describe 'selftest' do
147 |     subject do
148 |       get '/selftest', {}, {'HTTP_X_EDX_API_KEY' => TEST_API_KEY}
149 |       parse(last_response.body)
150 |     end
151 |
152 |     it 'returns valid JSON on success' do
153 |       expect(subject).to include('db', 'es', 'total_posts', 'total_users', 'last_post_created', 'elapsed_time')
154 |     end
155 |
156 |     it 'handles when the database is empty' do
157 |       expect(subject).to include('total_users' => 0,
158 |                                  'total_posts' => 0,
159 |                                  'last_post_created' => nil)
160 |     end
161 |
162 |     it 'handles when the database is not empty' do
163 |       thread = create(:comment_thread)
164 |       expect(subject).to include(
165 |         'total_users' => 1,
166 |         'total_posts' => 1,
167 |         'last_post_created' => thread.created_at.utc.iso8601)
168 |     end
169 |
170 |     it "displays tracebacks on failure" do
171 |       url = "#{CommentService.config[:elasticsearch_server]}/_cluster/health"
172 |       stub = stub_request(:any, url).to_raise(StandardError)
173 |
174 |       get '/selftest', {}, {'HTTP_X_EDX_API_KEY' => TEST_API_KEY}
175 |       expect(stub).to have_been_requested
176 |       WebMock.reset!
177 |
178 |       expect(last_response.status).to eq 500
179 |       expect(last_response.headers).to include('Content-Type' => 'text/plain')
180 |       expect(last_response.body).to include 'StandardError'
181 |       expect(last_response.body).to include File.expand_path(__FILE__)
182 |     end
183 |   end
184 |
185 |   describe 'config' do
186 |     describe 'Elasticsearch client' do
187 |       subject { Elasticsearch::Model.client }
188 |
189 |       it 'has a host value set to that from application.yml' do
190 |         expected = URI::parse(CommentService.config[:elasticsearch_server])
191 |         host = subject.transport.hosts[0]
192 |         host[:port] = host[:port].to_i
193 |         expect(URI::HTTP.build(host)).to eq expected
194 |       end
195 |     end
196 |   end
197 | end
198 |
--------------------------------------------------------------------------------
/spec/factories.rb:
--------------------------------------------------------------------------------
1 | require 'faker'
2 |
3 | # Reload i18n data for faker
4 | I18n.reload!
5 |
6 | FactoryBot.define do
7 |   factory :user do
8 |     # Initialize the model with all attributes since we are using a custom _id field.
9 |     # See https://github.com/thoughtbot/factory_bot/issues/544.
10 |     initialize_with { new(**attributes) }
11 |
12 |     sequence(:username) { |n| "#{Faker::Internet.user_name}_#{n}" }
13 |     sequence(:external_id) { username }
14 |   end
15 |
16 |   factory :comment_thread do
17 |     title { Faker::Lorem.sentence }
18 |     body { Faker::Lorem.paragraph }
19 |     course_id { Faker::Lorem.word }
20 |     thread_type { :discussion }
21 |     commentable_id { Faker::Lorem.word }
22 |     association :author, factory: :user
23 |     group_id { nil }
24 |     pinned { false }
25 |
26 |     trait :subscribe_author do
27 |       after(:create) do |thread|
28 |         thread.author.subscribe(thread)
29 |       end
30 |     end
31 |
32 |     trait :with_group_id do
33 |       group_id { Faker::Number.number(digits: 4) }
34 |     end
35 |   end
36 |
37 |   factory :comment do
38 |     association :author, factory: :user
39 |     comment_thread { parent ? parent.comment_thread : create(:comment_thread) }
40 |     body { Faker::Lorem.paragraph }
41 |     course_id { comment_thread.course_id }
42 |     commentable_id { comment_thread.commentable_id }
43 |     endorsed { false }
44 |   end
45 | end
46 |
--------------------------------------------------------------------------------
/spec/lib/task_helpers_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'elasticsearch'
3 |
4 | describe TaskHelpers do
5 |   describe TaskHelpers::ElasticsearchHelper do
6 |     let(:alias_name) { 'test_alias' }
7 |
8 |     before(:each) do
9 |       TaskHelpers::ElasticsearchHelper.delete_indices
10 |       TaskHelpers::ElasticsearchHelper.rebuild_indices
11 |     end
12 |
13 |     def assert_alias_points_to_index(alias_name, index_name)
14 |       test_alias = Elasticsearch::Model.client.indices.get_alias(name: alias_name).keys[0]
15 |       expect(test_alias).to eq(index_name)
16 |     end
17 |
18 |     context("#move_alias") do
19 |       before(:each) do
20 |         @index_names = TaskHelpers::ElasticsearchHelper.create_indices
21 |         @index_name = @index_names[0]
22 |       end
23 |
24 |       after(:each) do
25 |         Elasticsearch::Model.client.indices.delete(index: @index_names, ignore_unavailable: true)
26 |       end
27 |
28 |       it "points alias to index" do
29 |         TaskHelpers::ElasticsearchHelper.move_alias(alias_name, @index_name)
30 |         assert_alias_points_to_index(alias_name, @index_name)
31 |       end
32 |
33 |       it "fails when alias is same as index_name" do
34 |         expect { TaskHelpers::ElasticsearchHelper.move_alias(@index_name, @index_name) }.to raise_error
35 |       end
36 |
37 |       it "fails when index doesn't exist" do
38 |         expect { TaskHelpers::ElasticsearchHelper.move_alias(alias_name, 'missing_index') }.to raise_error
39 |       end
40 |
41 |     end
42 |
43 |     context("#rebuild_indices") do
44 |       include_context 'search_enabled'
45 |
46 |       it "builds new index with content" do
47 |         create(:comment_thread, body: 'the best test body', course_id: 'test_course_id')
48 |         TaskHelpers::ElasticsearchHelper.refresh_indices
49 |         expect(Elasticsearch::Model.client.search(
50 |           index: TaskHelpers::ElasticsearchHelper::index_names
51 |         )['hits']['total']['value']).to be > 0
52 |       end
53 |
54 |     end
55 |
56 |     context("#validate_indices") do
57 |       subject { TaskHelpers::ElasticsearchHelper.validate_indices }
58 |
59 |       it "validates the 'content' alias exists with proper mappings" do
60 |         subject
61 |       end
62 |
63 |       it "fails if one of the indices doesn't exist" do
64 |         Elasticsearch::Model.client.indices.delete(index: TaskHelpers::ElasticsearchHelper::temporary_index_names[0])
65 |         expect{subject}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
66 |         Elasticsearch::Model.client.indices.delete(index: TaskHelpers::ElasticsearchHelper::temporary_index_names[1])
67 |       end
68 |
69 |     end
70 |
71 |     context("#validate_prefixes") do
72 |       subject { TaskHelpers::ElasticsearchHelper }
73 |       PREFIX = 'prefix_'
74 |
75 |       before(:each) do
76 |         CommentService.config[:elasticsearch_index_prefix] = PREFIX
77 |       end
78 |
79 |       after(:each) do
80 |         CommentService.config[:elasticsearch_index_prefix] = ""
81 |       end
82 |
83 |       it "fails if the comment model isn't prefixed" do
84 |         expect(Comment.index_name).to start_with(PREFIX)
85 |       end
86 |
87 |       it "fails if the comment thread model isn't prefixed" do
88 |         expect(CommentThread.index_name).to start_with(PREFIX)
89 |       end
90 |
91 |       it "fails if created indexes aren't prefixed" do
92 |         expect(TaskHelpers::ElasticsearchHelper.index_names.all? { |v| v.start_with?(PREFIX) }).to be_truthy
93 |       end
94 |     end
95 |
96 |   end
97 | end
98 |
--------------------------------------------------------------------------------
/spec/lib/tasks/search_rake_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'elasticsearch'
3 |
4 | describe "search:rebuild_indices" do
5 |   include_context "rake"
6 |
7 |   before do
8 |     allow(TaskHelpers::ElasticsearchHelper).to receive(:rebuild_indices)
9 |   end
10 |
11 |   its(:prerequisites) { should include("environment") }
12 |
13 |   it "calls rebuild_indices with defaults" do
14 |     expect(TaskHelpers::ElasticsearchHelper).to receive(:rebuild_indices).with(500, 5)
15 |
16 |     subject.invoke
17 |   end
18 |
19 |   it "calls rebuild_indices with arguments" do
20 |     # Rake tasks receive their arguments as strings.
21 |     batch_size = '100'
22 |     extra_catchup_minutes = '10'
23 |     expect(TaskHelpers::ElasticsearchHelper).to receive(:rebuild_indices).with(
24 |       batch_size.to_i, extra_catchup_minutes.to_i
25 |     )
26 |
27 |     subject.invoke(batch_size, extra_catchup_minutes)
28 |   end
29 | end
30 |
31 | describe "search:catchup" do
32 |   include_context "rake"
33 |   let(:indices) { TaskHelpers::ElasticsearchHelper::index_names }
34 |   let(:comments_index_name) { Comment.index_name }
35 |   let(:comment_threads_index_name) { CommentThread.index_name }
36 |
37 |   before do
38 |     allow(TaskHelpers::ElasticsearchHelper).to receive(:catchup_indices)
39 |   end
40 |
41 |   its(:prerequisites) { should include("environment") }
42 |
43 |   it "calls catchup with defaults" do
44 |     expect(TaskHelpers::ElasticsearchHelper).to receive(:catchup_indices).with(indices, anything, 500)
45 |
46 |     subject.invoke(comments_index_name, comment_threads_index_name)
47 |   end
48 |
49 |   it "calls catchup with arguments" do
50 |     # Rake tasks receive their arguments as strings.
51 |     minutes = '2'
52 |     batch_size = '100'
53 |     expect(TaskHelpers::ElasticsearchHelper).to receive(:catchup_indices).with(indices, anything, batch_size.to_i)
54 |
55 |     subject.invoke(comments_index_name, comment_threads_index_name, minutes, batch_size)
56 |   end
57 | end
58 |
--------------------------------------------------------------------------------
/spec/lib/unicorn_helpers_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'elasticsearch'
3 |
4 | describe UnicornHelpers do
5 |   include_context 'search_enabled'
6 |
7 |   context("#exit_on_invalid_index") do
8 |     subject { UnicornHelpers.exit_on_invalid_index }
9 |
10 |     it "doesn't exit when index is valid" do
11 |       # code 101 is a special code recognized by forum-supervisor.sh
12 |       expect{subject}.not_to exit_with_code(101)
13 |     end
14 |
15 |     it "exits when index is invalid" do
16 |       TaskHelpers::ElasticsearchHelper.delete_indices
17 |       # code 101 is a special code recognized by forum-supervisor.sh
18 |       expect{subject}.to exit_with_code(101)
19 |     end
20 |
21 |   end
22 | end
23 |
--------------------------------------------------------------------------------
/spec/models/at_user_observer_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | # Commenting out until notifications are used again.
4 | #
5 | #describe AtUserObserver do
6 | #  before :each do
7 | #    @text =
8 | #"""
9 | #hi @tom, I have a question from @pi314 about the following code:
10 | #```
11 | #class A
12 | #  def set_some_variable
13 | #    @some_variable = 1
14 | #  end
15 | #end
16 | #```
17 | #and also the following code
18 | #    class A
19 | #      def get_some_variable
20 | #        @some_variable
21 | #      end
22 | #    end
23 | #what is the 'at' symbol doing there? @dementrock
24 | #"""
25 | #    User.delete_all
26 | #    User.create!(external_id: "1", username: "tom")
27 | #    User.create!(external_id: "2", username: "pi314")
28 | #  end
29 | #
30 | #  describe "#get_marked_text(text)" do
31 | #    it "returns marked at text" do
32 | #      converted = AtUserObserver.send :get_marked_text, @text
33 | #      converted.should include "@tom_0"
34 | #      converted.should include "@pi314_1"
35 | #      converted.should include "@some_variable_2"
36 | #      converted.should include "@some_variable_3"
37 | #      converted.should include "@dementrock_4"
38 | #    end
39 | #  end
40 | #
41 | #  describe "#get_valid_at_position_list(text)" do
42 | #    it "returns the list of positions for the valid @ notifications, filtering out the ones in code blocks" do
43 | #      list = AtUserObserver.send :get_valid_at_position_list, @text
44 | #      list.should include({ position: 0, username: "tom", user_id: "1" })
45 | #      list.should include({ position: 1, username: "pi314", user_id: "2" })
46 | #      list.length.should == 2
47 | #    end
48 | #  end
49 | #end
50 |
--------------------------------------------------------------------------------
/spec/models/comment_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'unicode_shared_examples'
3 |
4 | describe Comment do
5 |   let(:author) do
6 |     create_test_user(42)
7 |   end
8 |
9 |   let(:course_thread) do
10 |     make_thread(author, "Test course thread", "test_course", "test_commentable", :discussion, :course)
11 |   end
12 |
13 |   let(:standalone_thread) do
14 |     make_thread(author, "Test standalone thread", "test_course", "test_commentable", :discussion, :standalone)
15 |   end
16 |
17 |   def test_unicode_data(text)
18 |     comment = make_comment(author, course_thread, text)
19 |     retrieved = Comment.find(comment._id)
20 |     expect(retrieved.body).to eq(text)
21 |   end
22 |
23 |   include_examples "unicode data"
24 |
25 |   describe '#context' do
26 |     context 'with standalone_thread' do
27 |       it 'returns "standalone"' do
28 |         comment = make_comment(author, standalone_thread, "comment")
29 |         expect(comment.context).to eq("standalone")
30 |       end
31 |     end
32 |
33 |     context 'with course_thread' do
34 |       it 'returns "course"' do
35 |         comment = make_comment(author, course_thread, "comment")
36 |         expect(comment.context).to eq("course")
37 |       end
38 |     end
39 |
40 |     context 'without valid parent thread' do
41 |       it 'returns nil' do
42 |         comment = make_comment(author, course_thread, "comment")
43 |         comment.comment_thread_id = 'not a thread'
44 |         expect(comment.context).to eq(nil)
45 |       end
46 |     end
47 |   end
48 |
49 |   describe '#course_context?' do
50 |     context 'with standalone_thread' do
51 |       it 'returns false' do
52 |         comment = make_comment(author, standalone_thread, "comment")
53 |         expect(comment.course_context?).to be false
54 |       end
55 |     end
56 |
57 |     context 'with course_thread' do
58 |       it 'returns true' do
59 |         comment = make_comment(author, course_thread, "comment")
60 |         expect(comment.course_context?).to be true
61 |       end
62 |     end
63 |
64 |     context 'without valid parent thread' do
65 |       it 'returns false' do
66 |         comment = make_comment(author, course_thread, "comment")
67 |         comment.comment_thread_id = 'not a thread'
68 |         expect(comment.course_context?).to be false
69 |       end
70 |     end
71 |   end
72 |
73 |   describe '#standalone_context?' do
74 |     context 'with standalone_thread' do
75 |       it 'returns true' do
76 |         comment = make_comment(author, standalone_thread, "comment")
77 |         expect(comment.standalone_context?).to be true
78 |       end
79 |     end
80 |
81 |     context 'with course_thread' do
82 |       it 'returns false' do
83 |         comment = make_comment(author, course_thread, "comment")
84 |         expect(comment.standalone_context?).to be false
85 |       end
86 |     end
87 |
88 |     context 'without valid parent thread' do
89 |       it 'returns false' do
90 |         comment = make_comment(author, course_thread, "comment")
91 |         comment.comment_thread_id = 'not a thread'
92 |         expect(comment.standalone_context?).to be false
93 |       end
94 |     end
95 |
96 |   end
97 |
98 |   describe '#child_count' do
99 |     context 'with course_thread' do
100 |       it 'returns the cached child count' do
101 |         comment = make_comment(author, course_thread, "comment")
102 |         child_comment = make_comment(author, comment, "comment")
103 |         expect(comment.get_cached_child_count).to eq(1)
104 |       end
105 |
106 |       it 'recomputes the child count when the cached value is nil' do
107 |         comment = make_comment(author, course_thread, "comment")
108 |         child_comment = make_comment(author, comment, "comment")
109 |         comment.child_count = nil
110 |         expect(comment.get_cached_child_count).to eq(1)
111 |       end
112 |
113 |       it 'updates the cached child count' do
114 |         comment = make_comment(author, course_thread, "comment")
115 |         expect(comment.get_cached_child_count).to eq(0)
116 |         comment.child_count = 2
117 |         expect(comment.get_cached_child_count).to eq(2)
118 |         comment.update_cached_child_count
119 |         expect(comment.get_cached_child_count).to eq(0)
120 |       end
121 |     end
122 |   end
123 | end
124 |
125 | describe 'comment_with_es' do
126 |   include_context 'search_enabled'
127 |
128 |   let(:author) do
129 |     create_test_user(42)
130 |   end
131 |
132 |   let(:standalone_thread) do
133 |     make_thread(author, "Test standalone thread", "test_course", "test_commentable", :discussion, :standalone)
134 |   end
135 |
136 |   context 'with search_enabled, updating a comment' do
137 |     it 'results in ES proxy called' do
138 |       comment = make_comment(author, standalone_thread, "comment")
139 |       expect(comment.__elasticsearch__).to receive(:update_document).once.and_call_original
140 |       comment.update!({body: "changed"})
141 |     end
142 |
143 |     it 'results in ES proxy not called when explicitly disabled' do
144 |       comment = make_comment(author, standalone_thread, "comment")
145 |       expect(comment.__elasticsearch__).to_not receive(:update_document).and_call_original
146 |       comment.without_es do
147 |         comment.update!({body: "changed"})
148 |       end
149 |     end
150 |
151 |     it 'leaves the enable_es variable intact despite any errors during update' do
152 |       comment = make_comment(author, standalone_thread, "comment")
153 |       expect(comment.__elasticsearch__).to_not receive(:update_document).and_call_original
154 |       begin
155 |         comment.without_es do
156 |           raise # this line simulates what would happen if the update command threw an exception
157 |         end
158 |       rescue
159 |         expect(comment.es_enabled?).to be(true)
160 |       end
161 |     end
162 |   end
163 | end
164 |
--------------------------------------------------------------------------------
/spec/models/comment_thread_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'unicode_shared_examples'
3 |
4 | describe CommentThread do
5 |   let(:author) do
6 |     create_test_user(42)
7 |   end
8 |
9 |   before(:each) do
10 |     [Comment, CommentThread, User].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
11 |   end
12 |
13 |   context "sorting" do
14 |     it "indexes comments in hierarchical order" do
15 |
16 |       author = create_test_user('billy')
17 |
18 |       thread = CommentThread.new(title: "test case", body: "testing 123", course_id: "foo", commentable_id: "bar")
19 |       thread.thread_type = :discussion
20 |       thread.author = author
21 |       thread.save!
22 |
23 |       a = thread.comments.new(body: "a", course_id: "foo")
24 |       a.author = author
25 |       a.save!
26 |
27 |       b = a.children.new(body: "b", course_id: "foo")
28 |       b.author = author
29 |       b.comment_thread = thread
30 |       b.save!
31 |
32 |       c = b.children.new(body: "c", course_id: "foo")
33 |       c.author = author
34 |       c.comment_thread = thread
35 |       c.save!
36 |
37 |       d = b.children.new(body: "d", course_id: "foo")
38 |       d.author = author
39 |       d.comment_thread = thread
40 |       d.save!
41 |
42 |       e = a.children.new(body: "e", course_id: "foo")
43 |       e.author = author
44 |       e.comment_thread = thread
45 |       e.save!
46 |
47 |       f = thread.comments.new(body: "f", course_id: "foo")
48 |       f.author = author
49 |       f.save!
50 |
51 |       seq = []
52 |       rs = Comment.where(comment_thread_id: thread.id).order_by({"sk" => 1})
53 |       rs.each.map { |c| seq << c.body }
54 |       expect(seq).to eq(["a", "b", "c", "d", "e", "f"])
55 |
56 |     end
57 |   end
58 |
59 |   context "scoping" do
60 |     before(:each) do
61 |       author = create_test_user('billy')
62 |
63 |       # create a course thread
64 |       course_thread = CommentThread.new(title: "course thread", body: "testing 123", course_id: "foo", commentable_id: "bar")
65 |       course_thread.thread_type = :discussion
66 |       course_thread.author = author
67 |       course_thread.context = :course
68 |       course_thread.save!
69 |
70 |       # create a course thread (using the default context rather than setting it explicitly)
71 |       course_thread = CommentThread.new(title: "course thread", body: "testing 123", course_id: "foo", commentable_id: "bar")
72 |       course_thread.thread_type = :discussion
73 |       course_thread.author = author
74 |       course_thread.save!
75 |
76 |       # create a standalone thread
77 |       standalone_thread = CommentThread.new(title: "standalone_thread thread", body: "testing 123", course_id: "foo", commentable_id: "bear")
78 |       standalone_thread.thread_type = :discussion
79 |       standalone_thread.author = author
80 |       standalone_thread.context = :standalone
81 |       standalone_thread.save!
82 |     end
83 |
84 |     context '#unscoped' do
85 |       it 'returns all' do
86 |         expect(CommentThread.count).to eq(3)
87 |       end
88 |     end
89 |
90 |     context "#course_context" do
91 |       it 'returns only course threads' do
92 |         threads = CommentThread.course_context
93 |         expect(threads.count).to eq(2)
94 |         expect(threads.first.title).to eq("course thread")
95 |       end
96 |     end
97 |
98 |     context "#standalone_context" do
99 |       it 'returns only standalone threads' do
100 |         threads = CommentThread.standalone_context
101 |         expect(threads.count).to eq(1)
102 |         expect(threads.first.title).to eq("standalone_thread thread")
103 |       end
104 |     end
105 |   end
106 |
107 |   def test_unicode_data(text)
108 |     thread = make_thread(author, text, "unicode_course", "unicode_commentable")
109 |     retrieved = CommentThread.find(thread._id)
110 |     expect(retrieved.title).to eq(text)
111 |     expect(retrieved.body).to eq(text)
112 |   end
113 |
114 |   include_examples "unicode data"
115 | end
116 |
117 |
--------------------------------------------------------------------------------
/spec/models/user_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 | require 'unicode_shared_examples'
3 |
4 | describe User do
5 |   let(:author) { create_test_user(666) }
6 |   let(:reader) { create_test_user(667) }
7 |   let(:thread) { make_standalone_thread(author) }
8 |
9 |   before(:each) do
10 |     [Comment, CommentThread, User].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
11 |   end
12 |
13 |   it "should have no votes if it never voted" do
14 |     expect(reader.upvoted_ids).to eq([])
15 |   end
16 |
17 |   it "should have one vote if it voted once" do
18 |     expect(reader.upvoted_ids).to eq([])
19 |     reader.vote(thread, :up)
20 |     expect(reader.upvoted_ids).to eq([thread._id])
21 |   end
22 | end
23 |
--------------------------------------------------------------------------------
/spec/presenters/thread_list_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe ThreadListPresenter do
4 |   context "#initialize" do
5 |     before(:each) do
6 |       User.all.delete
7 |       Content.all.delete
8 |       @threads = (1..3).map do |n|
9 |         make_thread(
10 |           create_test_user("author#{n}"),
11 |           "thread #{n}",
12 |           'foo', 'bar'
13 |         )
14 |       end
15 |       @reader = create_test_user('reader')
16 |     end
17 |
18 |     it "handles unread threads" do
19 |       pres = ThreadListPresenter.new(@threads, @reader, 'foo')
20 |       pres.to_hash.each_with_index do |h, i|
21 |         expect(h).to eq(ThreadPresenter.factory(@threads[i], @reader).to_hash)
22 |       end
23 |     end
24 |
25 |     it "handles read threads" do
26 |       @reader.mark_as_read(@threads[0])
27 |       @reader.save!
28 |       pres = ThreadListPresenter.new(@threads, @reader, 'foo')
29 |       pres.to_hash.each_with_index do |h, i|
30 |         expect(h).to eq(ThreadPresenter.factory(@threads[i], @reader).to_hash)
31 |       end
32 |     end
33 |
34 |     it "handles empty list of threads" do
35 |       pres = ThreadListPresenter.new([], @reader, 'foo')
36 |       expect(pres.to_hash).to eq([])
37 |     end
38 |
39 |   end
40 | end
41 |
--------------------------------------------------------------------------------
/spec/support/database_cleaner.rb:
--------------------------------------------------------------------------------
1 | require 'mongoid_cleaner'
2 |
3 | RSpec.configure do |config|
4 |   config.before(:suite) do
5 |     MongoidCleaner.strategy = :drop
6 |   end
7 |
8 |   config.around(:each) do |example|
9 |     MongoidCleaner.cleaning do
10 |       example.run
11 |     end
12 |   end
13 | end
14 |
--------------------------------------------------------------------------------
/spec/support/elasticsearch.rb:
--------------------------------------------------------------------------------
1 | require 'task_helpers'
2 |
3 |
4 | RSpec.shared_context 'search_enabled' do
5 |
6 |   before(:all) do
7 |     CommentService.config[:enable_search] = true
8 |
9 |     # Delete any previously created index to ensure our search tests start
10 |     # with a clean slate. Each test will recreate the index.
11 |     TaskHelpers::ElasticsearchHelper.delete_indices
12 |   end
13 |
14 |   after(:each) do
15 |     # Delete the index after each test so it will be re-created.
16 |     TaskHelpers::ElasticsearchHelper.delete_indices
17 |   end
18 |
19 |   after(:all) do
20 |     # Ensure that subsequent tests, that do not require search, are unaffected by search.
21 |     CommentService.config[:enable_search] = false
22 |
23 |     # Ensure (once more) the index was deleted.
24 |     TaskHelpers::ElasticsearchHelper.delete_indices
25 |   end
26 |
27 | end
28 |
29 | RSpec.configure do |config|
30 |
31 |   config.before(:suite) do
32 |     CommentService.config[:enable_search] = false
33 |   end
34 |
35 |   config.before(:each) do
36 |     # Create the index before each test if it doesn't exist.
37 |     TaskHelpers::ElasticsearchHelper.initialize_indices(true)
38 |   end
39 |
40 |   config.after(:each) do
41 |     Elasticsearch::Model.client.indices.delete(index: "_all", ignore_unavailable: true)
42 |   end
43 |
44 | end
45 |
--------------------------------------------------------------------------------
/spec/support/factory_bot.rb:
--------------------------------------------------------------------------------
1 | require 'factory_bot'
2 |
3 | RSpec.configure do |config|
4 |   config.include FactoryBot::Syntax::Methods
5 |
6 |   FactoryBot.find_definitions
7 |
8 |   config.before(:suite) do
9 |     MongoidCleaner.cleaning do
10 |       FactoryBot.lint
11 |     end
12 |   end
13 | end
14 |
--------------------------------------------------------------------------------
/spec/support/matchers.rb:
--------------------------------------------------------------------------------
1 | require 'rspec/expectations'
2 |
3 | RSpec::Matchers.define :be_an_empty_response do
4 |   match do |actual|
5 |     actual.body == '{}'
6 |   end
7 | end
8 |
9 | RSpec::Matchers.define :exit_with_code do |exp_code|
10 |   supports_block_expectations
11 |   actual = nil
12 |
13 |   match do |block|
14 |     begin
15 |       block.call
16 |     rescue SystemExit => e
17 |       actual = e.status
18 |     end
19 |     actual and actual == exp_code
20 |   end
21 |   failure_message do |block|
22 |     "expected block to call exit(#{exp_code}) but exit" +
23 |       (actual.nil? ? " was not called" : "(#{actual}) was called")
24 |   end
25 |   failure_message_when_negated do |block|
26 |     "expected block not to call exit(#{exp_code})"
27 |   end
28 |   description do
29 |     "expect block to call exit(#{exp_code})"
30 |   end
31 | end
32 |
--------------------------------------------------------------------------------
/spec/support/rake.rb:
--------------------------------------------------------------------------------
1 | require "rake"
2 |
3 | shared_context "rake" do
4 |   let(:rake) { Rake::Application.new }
5 |   let(:task_name) { self.class.top_level_description }
6 |   let(:task_path) { "lib/tasks/#{task_name.split(":").first}" }
7 |
8 |   subject { rake[task_name] }
9 |
10 |   def loaded_files_excluding_current_rake_file
11 |     $".reject { |file| file == File.absolute_path("#{task_path}.rake").to_s }
12 |   end
13 |
14 |   before do
15 |     Rake.application = rake
16 |     Rake.application.rake_require(task_path, [Rake.application.original_dir], loaded_files_excluding_current_rake_file)
17 |     Rake::Task.define_task(:environment)
18 |   end
19 | end
--------------------------------------------------------------------------------
/spec/unicode_shared_examples.rb:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 |
3 | shared_examples "unicode data" do
4 |   it "can handle ASCII data" do
5 |     test_unicode_data("This post contains ASCII.")
6 |   end
7 |
8 |   it "can handle Latin-1 data" do
9 |     test_unicode_data("Thís pøst çòñtáins Lätin-1 tæxt")
10 |   end
11 |
12 |   it "can handle CJK data" do
13 |     test_unicode_data("イんノ丂 アo丂イ co刀イムノ刀丂 cフズ")
14 |   end
15 |
16 |   it "can handle non-BMP data" do
17 |     test_unicode_data("𝕋𝕙𝕚𝕤 𝕡𝕠𝕤𝕥 𝕔𝕠𝕟𝕥𝕒𝕚𝕟𝕤 𝕔𝕙𝕒𝕣𝕒𝕔𝕥𝕖𝕣𝕤 𝕠𝕦𝕥𝕤𝕚𝕕𝕖 𝕥𝕙𝕖 𝔹𝕄ℙ")
18 |   end
19 |
20 |   it "can handle special chars" do
21 |     test_unicode_data(
22 |       "\" This , post > contains < delimiter ] and [ other } " +
23 |       "special { characters ; that & may ' break things"
24 |     )
25 |   end
26 |
27 |   it "can handle string interpolation syntax" do
28 |     test_unicode_data("This string contains %s string interpolation #\{syntax}")
29 |   end
30 | end
31 |
--------------------------------------------------------------------------------