├── mounted ├── sender_api │ └── .gitkeep └── logstash │ └── logstash.conf ├── .gitignore ├── README.md ├── .env-to-rename └── docker-compose.yaml /mounted/sender_api/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | .idea/ 3 | /mounted/sender_api/session.txt -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Родительский репозиторий для автоматизации запуска backend и sender части приложения TelegramForwarder 2 | 3 | Приложение предназначено для "вылавливания" сообщений по определенным словам из публичных групп. 4 | Для того чтобы получать сообщения, нужно добавить через backend (через бота) подписку на публичную группу. 5 | 6 | Разворачиваем на хостинге: 7 | 8 | - git clone https://github.com/lashnag/telegram-forwarder.git 9 | - добавляем свой файл .env с настройками окружения 10 | - git clone https://github.com/lashnag/telegram-client-sender-api.git 11 | - устанавливаем python 12 | - cd telegram-client-sender-api 13 | - pip install -r requirements.txt 14 | - cd app 15 | - python save_session.py 16 | - Вводим Api Id, Api Hash, телефон и код подтверждения 17 | - Ищем папку mounted и копируем из нее файл session.txt в папку первого репозитория в /mounted/sender_api/ 18 | - В папке первого репозитория запускаем docker-compose up -------------------------------------------------------------------------------- /.env-to-rename: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=test 2 | POSTGRES_PASSWORD=test 3 | POSTGRES_DB=community_bot 4 | POSTGRES_PATH=database 5 | POSTGRES_PORT=9999 6 | FORWARDER_BACKEND_BOT_SECRET_KEY=bot_secret_key 7 | 8 | API_ID=API_ID_FROM_MY.TELEGRAM.ORG 9 
| API_HASH=API_HASH_FROM_MY.TELEGRAM.ORG 10 | PHONE_NUMBER=TELEGRAM_ACCOUNT_PHONE_NUMBER 11 | 12 | KIBANA_PORT=kibana_port 13 | ELASTIC_USERNAME=elastic_username 14 | ELASTIC_PASSWORD=elastic_password 15 | 16 | BACKEND_BASIC_AUTH_USER=admin 17 | BACKEND_BASIC_AUTH_PASSWORD=admin 18 | 19 | GRAFANA_PORT=8888 20 | GRAFANA_USERNAME=admin 21 | GRAFANA_PASSWORD=admin 22 | 23 | DEEP_SEEK_API_TOKEN="see https://platform.deepseek.com/usage" 24 | 25 | YANDEX_GPT_RESOURCE="see http://console.yandex.cloud/cloud/" 26 | YANDEX_GPT_API_TOKEN="see http://console.yandex.cloud/cloud/" 27 | 28 | GIGA_CHAT_ACCESS_TOKEN="see https://developers.sber.ru/studio/workspaces/" 29 | 30 | DOMAIN=https://your-domain.com 31 | PROD_MODE=true 32 | REMOTE_LOGGER=false -------------------------------------------------------------------------------- /mounted/logstash/logstash.conf: -------------------------------------------------------------------------------- 1 | input { 2 | tcp { 3 | port => 5022 4 | codec => json_lines 5 | } 6 | } 7 | 8 | output { 9 | if [application] == "forwarder-backend" { 10 | elasticsearch { 11 | hosts => ["http://elasticsearch:9200"] 12 | index => "forwarder-backend-logs-%{+YYYY.MM.dd}" 13 | user => "${ELASTIC_USERNAME}" 14 | password => "${ELASTIC_PASSWORD}" 15 | } 16 | } else if [application] == "sender-api" { 17 | elasticsearch { 18 | hosts => ["http://elasticsearch:9200"] 19 | index => "sender-api-logs-%{+YYYY.MM.dd}" 20 | user => "${ELASTIC_USERNAME}" 21 | password => "${ELASTIC_PASSWORD}" 22 | } 23 | } else if [application] == "lemmatizer" { 24 | elasticsearch { 25 | hosts => ["http://elasticsearch:9200"] 26 | index => "lemmatizer-logs-%{+YYYY.MM.dd}" 27 | user => "${ELASTIC_USERNAME}" 28 | password => "${ELASTIC_PASSWORD}" 29 | } 30 | } 31 | else if [application] == "telegram-messages" { 32 | elasticsearch { 33 | hosts => ["http://elasticsearch:9200"] 34 | index => "telegram-messages-%{+YYYY.MM.dd}" 35 | user => "${ELASTIC_USERNAME}" 36 | password => 
"${ELASTIC_PASSWORD}" 37 | } 38 | } else if [application] == "ocr" { 39 | elasticsearch { 40 | hosts => ["http://elasticsearch:9200"] 41 | index => "ocr-logs-%{+YYYY.MM.dd}" 42 | user => "${ELASTIC_USERNAME}" 43 | password => "${ELASTIC_PASSWORD}" 44 | } 45 | } else if [application] == "entities-extractor" { 46 | elasticsearch { 47 | hosts => ["http://elasticsearch:9200"] 48 | index => "entities-extractor-%{+YYYY.MM.dd}" 49 | user => "${ELASTIC_USERNAME}" 50 | password => "${ELASTIC_PASSWORD}" 51 | } 52 | } 53 | stdout { 54 | codec => rubydebug 55 | } 56 | } -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | database: 3 | image: postgres:15 4 | container_name: forwarder_backend_database 5 | restart: unless-stopped 6 | env_file: 7 | - .env 8 | ports: 9 | - "${POSTGRES_PORT}:5432" 10 | volumes: 11 | - postgres_data:/var/lib/postgresql/data 12 | healthcheck: 13 | test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER}" ] 14 | interval: 30s 15 | timeout: 10s 16 | retries: 5 17 | backend: 18 | image: lashnag/forwarder_backend_application:2.0.8 19 | container_name: forwarder_backend_application 20 | restart: unless-stopped 21 | ports: 22 | - "8080:8080" 23 | env_file: 24 | - .env 25 | depends_on: 26 | database: 27 | condition: service_healthy 28 | # logstash: 29 | # condition: service_healthy 30 | lemmatizer: 31 | condition: service_healthy 32 | ocr: 33 | condition: service_healthy 34 | entities-extractor: 35 | condition: service_healthy 36 | healthcheck: 37 | test: ["CMD", "curl", "-f", "-u", "${BACKEND_BASIC_AUTH_USER}:${BACKEND_BASIC_AUTH_PASSWORD}", "http://localhost:8080/actuator/health"] 38 | interval: 30s 39 | timeout: 10s 40 | retries: 5 41 | sender: 42 | image: lashnag/sender_api:2.1.0 43 | container_name: sender_api 44 | restart: unless-stopped 45 | env_file: 46 | - .env 47 | # depends_on: 48 | # logstash: 49 | # 
condition: service_healthy 50 | volumes: 51 | - ./mounted/sender_api:/sender_api/mounted 52 | lemmatizer: 53 | image: lashnag/lemmatizer:1.2.1 54 | container_name: lemmatizer 55 | restart: unless-stopped 56 | env_file: 57 | - .env 58 | # depends_on: 59 | # logstash: 60 | # condition: service_healthy 61 | healthcheck: 62 | test: ["CMD", "curl", "-f", "http://localhost:4355/healthcheck"] 63 | interval: 30s 64 | timeout: 10s 65 | retries: 5 66 | ocr: 67 | image: lashnag/ocr:1.2.1 68 | container_name: ocr 69 | restart: unless-stopped 70 | env_file: 71 | - .env 72 | # depends_on: 73 | # logstash: 74 | # condition: service_healthy 75 | healthcheck: 76 | test: [ "CMD", "curl", "-f", "http://localhost:4366/healthcheck" ] 77 | interval: 30s 78 | timeout: 10s 79 | retries: 5 80 | entities-extractor: 81 | image: lashnag/entities-extractor:2.1.1 82 | container_name: entities-extractor 83 | restart: unless-stopped 84 | env_file: 85 | - .env 86 | # depends_on: 87 | # logstash: 88 | # condition: service_healthy 89 | healthcheck: 90 | test: [ "CMD", "curl", "-f", "http://localhost:4319/healthcheck" ] 91 | interval: 30s 92 | timeout: 10s 93 | retries: 5 94 | # elasticsearch: 95 | # image: elasticsearch:7.17.0 96 | # container_name: elasticsearch 97 | # restart: unless-stopped 98 | # environment: 99 | # - discovery.type=single-node 100 | # - ES_JAVA_OPTS=-Xms512m -Xmx512m 101 | # - xpack.security.enabled=true 102 | # - ELASTIC_USERNAME=${ELASTIC_USERNAME} 103 | # - ELASTIC_PASSWORD=${ELASTIC_PASSWORD} 104 | # volumes: 105 | # - elastic_search_data:/usr/share/elasticsearch/data 106 | # env_file: 107 | # - .env 108 | # healthcheck: 109 | # test: [ "CMD", "curl", "-u", "${ELASTIC_USERNAME}:${ELASTIC_PASSWORD}", "http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=50s" ] 110 | # interval: 5s 111 | # timeout: 10s 112 | # retries: 15 113 | # logstash: 114 | # image: logstash:7.17.0 115 | # container_name: logstash 116 | # restart: unless-stopped 117 | # environment: 118 | 
# - XPACK_SECURITY_ENABLED=false 119 | # - XPACK_MONITORING_ENABLED=false 120 | # depends_on: 121 | # elasticsearch: 122 | # condition: service_healthy 123 | # volumes: 124 | # - ./mounted/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf 125 | # env_file: 126 | # - .env 127 | # healthcheck: 128 | # test: [ "CMD", "curl", "--silent", "--fail", "http://localhost:9600/_node/stats" ] 129 | # interval: 5s 130 | # timeout: 10s 131 | # retries: 15 132 | # kibana: 133 | # image: kibana:7.17.0 134 | # container_name: kibana 135 | # restart: unless-stopped 136 | # ports: 137 | # - "${KIBANA_PORT}:5601" 138 | # depends_on: 139 | # elasticsearch: 140 | # condition: service_healthy 141 | # environment: 142 | # - ELASTICSEARCH_HOSTS=http://elasticsearch:9200 143 | # - ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME} 144 | # - ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD} 145 | # env_file: 146 | # - .env 147 | # grafana: 148 | # image: grafana/grafana:11.5.1 149 | # container_name: grafana 150 | # restart: unless-stopped 151 | # ports: 152 | # - "${GRAFANA_PORT}:3000" 153 | # depends_on: 154 | # - elasticsearch 155 | # environment: 156 | # - GF_SECURITY_ADMIN_USER=${GRAFANA_USERNAME} 157 | # - GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD} 158 | # - GF_SERVER_ROOT_URL=${DOMAIN}:${GRAFANA_PORT} 159 | # volumes: 160 | # - grafana:/var/lib/grafana 161 | volumes: 162 | postgres_data: 163 | # elastic_search_data: 164 | # grafana: 165 | --------------------------------------------------------------------------------