├── CODEOWNERS ├── nab-web └── src │ ├── test │ ├── resources │ │ └── META-INF │ │ │ └── services │ │ │ └── ch.qos.logback.classic.spi.Configurator │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── web │ │ ├── jersey │ │ ├── resolver │ │ │ ├── variants │ │ │ │ ├── GenericCache.java │ │ │ │ ├── PartiallyOverflowingCacheOptional.java │ │ │ │ ├── PartiallyOverflowingCache.java │ │ │ │ ├── PartiallyOverflowingCacheWithSizeCache.java │ │ │ │ └── PartiallyOverflowingCacheWithSizeAtomicCache.java │ │ │ └── TestResource.java │ │ └── filter │ │ │ └── cache │ │ │ └── CacheFilterTest.java │ │ ├── NabWebTestLogbackBaseConfigurator.java │ │ ├── http │ │ └── CacheUtilsTest.java │ │ ├── NabWebTestConfig.java │ │ └── resource │ │ └── StatusResourceTest.java │ └── main │ └── java │ └── ru │ └── hh │ └── nab │ └── web │ ├── http │ ├── RequestInfo.java │ ├── CacheUtils.java │ ├── HttpStatus.java │ └── RequestContext.java │ ├── jersey │ ├── resolver │ │ ├── CharacterEscapeBase.java │ │ ├── ObjectMapperContextResolver.java │ │ ├── JsonCharacterEscapes.java │ │ ├── PartiallyOverflowingCache.java │ │ └── XmlEscapeHandler.java │ ├── filter │ │ ├── cache │ │ │ ├── Serializer.java │ │ │ ├── Header.java │ │ │ └── CachingOutputStream.java │ │ └── ErrorAcceptFilter.java │ ├── NabPriorities.java │ └── interceptor │ │ └── SentryAppenderInterceptor.java │ ├── exceptions │ ├── NabMappableException.java │ ├── ExceptionSerializer.java │ ├── MappableExceptionUtils.java │ ├── ExecutionExceptionMapper.java │ ├── CompletionExceptionMapper.java │ ├── SecurityExceptionMapper.java │ ├── ServerTimeoutExceptionMapper.java │ ├── IllegalStateExceptionMapper.java │ ├── IllegalArgumentExceptionMapper.java │ ├── UnwrappingExceptionMapper.java │ ├── InsufficientTimeoutExceptionMapper.java │ └── NotFoundExceptionMapper.java │ ├── resource │ └── StatusResource.java │ └── servlet │ └── filter │ └── CommonHeadersFilter.java ├── nab-consul ├── src │ └── main │ │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── 
consul │ │ ├── HostPort.java │ │ ├── HostsFetcher.java │ │ ├── ConsulTagsSupplier.java │ │ ├── ConsulServiceException.java │ │ └── ConsulFetcher.java └── pom.xml ├── nab-metrics └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── metrics │ │ ├── Tags.java │ │ ├── SimpleHistogram.java │ │ ├── RangedHistogram.java │ │ ├── MultiTags.java │ │ ├── CompactHistogram.java │ │ ├── Max.java │ │ ├── AdjustingHistograms.java │ │ ├── TaggedSender.java │ │ ├── Tag.java │ │ ├── UniformHistogram.java │ │ └── executor │ │ └── ThreadDiagnosticRejectedExecutionHandler.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── metrics │ ├── TestUtils.java │ ├── RangedHistogramTest.java │ ├── CompactHistogramTest.java │ └── UniformHistogramTest.java ├── nab-hibernate └── src │ ├── main │ ├── resources │ │ └── META-INF │ │ │ └── services │ │ │ └── org.hibernate.boot.spi.SessionFactoryBuilderFactory │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── hibernate │ │ ├── service │ │ ├── ServiceSupplier.java │ │ └── NabServiceContributor.java │ │ ├── events │ │ ├── EventListenerRegistryConsumer.java │ │ └── EventListenerRegistryPropagator.java │ │ ├── monitoring │ │ ├── HibernateMetrics.java │ │ └── HibernateStatisticsSender.java │ │ ├── interceptor │ │ ├── RequestIdPassingInterceptor.java │ │ └── ControllerPassingInterceptor.java │ │ ├── adapter │ │ ├── NabHibernateJpaVendorAdapter.java │ │ └── NabHibernatePersistenceProvider.java │ │ └── util │ │ └── SqlRequestIdCommenter.java │ └── test │ ├── resources │ └── hibernate-test.properties │ └── java │ └── ru │ └── hh │ └── nab │ └── hibernate │ ├── util │ └── SqlRequestIdCommenterTest.java │ └── interceptor │ └── ControllerPassingInterceptorTest.java ├── .hh-release.yaml ├── .gitignore ├── nab-telemetry-jdbc ├── src │ ├── main │ │ └── java │ │ │ └── ru │ │ │ └── hh │ │ │ └── nab │ │ │ └── telemetry │ │ │ └── jdbc │ │ │ ├── internal │ │ │ ├── model │ │ │ │ ├── JdbcOperationKind.java │ │ │ │ ├── NabDbInfo.java │ │ │ │ ├── 
NabDbRequest.java │ │ │ │ └── NabDataSourceInfo.java │ │ │ ├── extractor │ │ │ │ ├── ConnectionSpanNameExtractor.java │ │ │ │ ├── NabJdbcOperationKindExtractor.java │ │ │ │ ├── DataSourceNameExtractor.java │ │ │ │ └── NabDataSourceInfoExtractor.java │ │ │ └── getter │ │ │ │ ├── ConnectionCodeAttributesGetter.java │ │ │ │ ├── ExtendedNameJdbcAttributesGetter.java │ │ │ │ ├── NabJdbcNetAttributesGetter.java │ │ │ │ └── NabJdbcAttributesGetter.java │ │ │ ├── NabTelemetryJdbcExtension.java │ │ │ └── NabTelemetryDataSourceFactory.java │ └── test │ │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── telemetry │ │ └── jdbc │ │ └── internal │ │ └── extractor │ │ └── ConnectionSpanNameExtractorTest.java └── pom.xml ├── nab-kafka └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── kafka │ │ ├── consumer │ │ ├── MessageProcessor.java │ │ ├── KafkaConsumerFactory.java │ │ ├── AckProvider.java │ │ ├── DeserializerSupplier.java │ │ ├── retry │ │ │ ├── policy │ │ │ │ ├── Never.java │ │ │ │ ├── Deadline.java │ │ │ │ ├── Fixed.java │ │ │ │ ├── Progressive.java │ │ │ │ ├── RetryLimit.java │ │ │ │ └── Ttl.java │ │ │ ├── RetryPolicyResolver.java │ │ │ └── MessageProcessingHistory.java │ │ ├── SeekPosition.java │ │ ├── DeadLetterQueue.java │ │ ├── ClusterMetadataProvider.java │ │ ├── ConsumerMetadata.java │ │ ├── PartialAck.java │ │ └── FailFastDefaultKafkaConsumerFactory.java │ │ ├── producer │ │ ├── SerializerSupplier.java │ │ ├── KafkaSendResult.java │ │ ├── KafkaProducer.java │ │ └── DefaultKafkaProducer.java │ │ ├── exception │ │ ├── NabKafkaException.java │ │ └── ConfigurationException.java │ │ ├── serialization │ │ ├── JacksonDeserializerSupplier.java │ │ └── JacksonSerializerSupplier.java │ │ └── util │ │ └── AckUtils.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── kafka │ └── consumer │ ├── TopicConsumerMock.java │ ├── retry │ └── RetryTopicsTest.java │ ├── ClusterMetadataProviderTest.java │ ├── TopicPartitionsMonitoringTest.java │ └── 
KafkaConsumerFactoryTest.java ├── nab-data-source └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── datasource │ │ ├── healthcheck │ │ ├── HealthCheckDataSource.java │ │ ├── HealthCheck.java │ │ ├── UnhealthyDataSourceException.java │ │ └── HealthCheckHikariDataSourceFactory.java │ │ ├── ext │ │ ├── JdbcExtension.java │ │ └── OpenTelemetryJdbcExtension.java │ │ ├── DataSourceType.java │ │ ├── monitoring │ │ ├── MetricsTrackerFactoryProvider.java │ │ ├── NabMetricsTrackerFactoryProvider.java │ │ ├── ConnectionPoolMetrics.java │ │ └── StatementTimeoutDataSource.java │ │ ├── aspect │ │ ├── ExecuteOnDataSourceWrappedException.java │ │ ├── ExecuteOnDataSourceTransactionCallbackFactory.java │ │ └── TransactionalAspect.java │ │ ├── validation │ │ └── DataSourcesReadyTarget.java │ │ ├── transaction │ │ └── TransactionalScope.java │ │ ├── NamedDataSource.java │ │ ├── annotation │ │ ├── ExecuteOnDataSource.java │ │ └── DataSourceCacheMode.java │ │ └── routing │ │ └── RoutingDataSourceFactory.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── datasource │ └── TransactionAssertions.java ├── nab-jpa ├── src │ └── main │ │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── jpa │ │ ├── JpaPropertiesProvider.java │ │ ├── EntityManagerFactoryCreationHandler.java │ │ ├── EntityManagerFactoryId.java │ │ ├── NabPersistenceUnitPostProcessor.java │ │ ├── MappingConfig.java │ │ ├── aspect │ │ └── ExecuteOnDataSourceTransactionCallbackFactoryImpl.java │ │ └── NabEntityManagerFactoryBean.java └── pom.xml ├── .github └── PULL_REQUEST_TEMPLATE.md ├── catalog-info.yaml ├── nab-logging └── src │ ├── main │ ├── resources │ │ └── shared-pattern-logback.xml │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── logging │ │ ├── ListAppender.java │ │ ├── json │ │ ├── NabJsonLayout.java │ │ ├── NabTSOnlyJsonLayout.java │ │ ├── JsonFieldNames.java │ │ ├── NabTSOnlyJsonEncoder.java │ │ ├── NabJsonEncoder.java │ │ └── AppenderNameJsonProvider.java │ │ └── override │ │ ├── 
SkipLogLevelOverrideException.java │ │ └── LogLevelOverrideExtension.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── logging │ ├── NabLoggingConfiguratorTemplateTest.java │ └── ListAppenderTest.java ├── .travis.yml ├── .forgejo └── workflows │ └── security_scan.yml ├── nab-testbase └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── testbase │ │ ├── hibernate │ │ └── HibernateTestBase.java │ │ ├── jpa │ │ └── JpaTestBase.java │ │ ├── web │ │ └── WebTestBase.java │ │ ├── kafka │ │ ├── NoopKafkaConsumerFactory.java │ │ └── NoopKafkaConsumer.java │ │ ├── redis │ │ └── EmbeddedRedisFactory.java │ │ └── transaction │ │ └── TransactionTestBase.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── testbase │ └── kafka │ └── NoopKafkaConsumerFactoryTest.java ├── nab-common ├── src │ ├── main │ │ └── java │ │ │ └── ru │ │ │ └── hh │ │ │ └── nab │ │ │ └── common │ │ │ ├── constants │ │ │ ├── RequestAttributes.java │ │ │ └── RequestHeaders.java │ │ │ ├── qualifier │ │ │ └── NamedQualifier.java │ │ │ ├── mdc │ │ │ ├── MDCTraceContextListener.java │ │ │ └── MDC.java │ │ │ ├── util │ │ │ └── ExceptionUtils.java │ │ │ ├── exception │ │ │ └── LoggingUncaughtExceptionHandler.java │ │ │ └── servlet │ │ │ └── ServletFilterPriorities.java │ └── test │ │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── common │ │ ├── servlet │ │ └── UriComponentTest.java │ │ ├── mdc │ │ └── MDCTest.java │ │ └── util │ │ └── ExceptionUtilsTest.java └── pom.xml ├── nab-telemetry └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── telemetry │ │ ├── TelemetryProcessorFactory.java │ │ ├── IdGeneratorImpl.java │ │ └── ContextStorage.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── telemetry │ ├── TestResource.java │ └── TestResourceWithSubResource.java ├── nab-sentry ├── src │ └── main │ │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── sentry │ │ ├── SentryTraceContextListener.java │ │ ├── SentryEventProcessor.java │ │ ├── 
SentryInitializer.java │ │ └── SentryScopeConfigurator.java └── pom.xml ├── nab-jclient └── src │ ├── main │ └── java │ │ └── ru │ │ └── hh │ │ └── nab │ │ └── jclient │ │ └── metrics │ │ └── MetricsConsumerFactory.java │ └── test │ └── java │ └── ru │ └── hh │ └── nab │ └── jclient │ ├── UriCompactionUtilTest.java │ └── JClientContextProviderFilterTest.java ├── nab-telemetry-kafka ├── pom.xml └── src │ └── main │ └── java │ └── ru │ └── hh │ └── nab │ └── telemetry │ └── TelemetryAwareProducerFactory.java └── README.md /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # team_mm_public_channel: ~platform 2 | * @hhru/platform 3 | -------------------------------------------------------------------------------- /nab-web/src/test/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator: -------------------------------------------------------------------------------- 1 | ru.hh.nab.web.NabWebTestLogbackBaseConfigurator 2 | -------------------------------------------------------------------------------- /nab-consul/src/main/java/ru/hh/nab/consul/HostPort.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.consul; 2 | 3 | public record HostPort(String host, int port) { 4 | } 5 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/Tags.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | abstract class Tags { 4 | abstract Tag[] getTags(); 5 | } 6 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/resources/META-INF/services/org.hibernate.boot.spi.SessionFactoryBuilderFactory: -------------------------------------------------------------------------------- 1 | ru.hh.nab.hibernate.NabSessionFactoryBuilderFactory 2 | 
-------------------------------------------------------------------------------- /nab-hibernate/src/test/resources/hibernate-test.properties: -------------------------------------------------------------------------------- 1 | hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect 2 | hibernate.hbm2ddl.auto=create 3 | hibernate.show_sql=false 4 | hibernate.format_sql=false 5 | -------------------------------------------------------------------------------- /.hh-release.yaml: -------------------------------------------------------------------------------- 1 | version: 22.0.4 2 | build_image: registry.pyn.ru/openjdk17-building:2024.05.01 3 | type: library 4 | build_method: mvn_build 5 | deploy_method: mvn_deploy 6 | update_method: mvn_update 7 | changelog: true 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | *~ 3 | #* 4 | *.iws 5 | *.ipr 6 | *.iml 7 | .idea 8 | .settings 9 | .project 10 | */.classpath 11 | nb-configuration.xml 12 | dependency-reduced-pom.xml 13 | *.properties.dev 14 | *.log 15 | logs 16 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/model/JdbcOperationKind.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.model; 2 | 3 | public enum JdbcOperationKind { 4 | CONNECTION, 5 | STATEMENT 6 | } 7 | -------------------------------------------------------------------------------- /nab-consul/src/main/java/ru/hh/nab/consul/HostsFetcher.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.consul; 2 | 3 | import java.util.Set; 4 | 5 | public interface HostsFetcher { 6 | 7 | Set fetchHostsByName(String serviceName); 8 | 9 | } 10 | 
-------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/MessageProcessor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | @FunctionalInterface 4 | public interface MessageProcessor { 5 | void process(M message) throws InterruptedException; 6 | } 7 | -------------------------------------------------------------------------------- /nab-consul/src/main/java/ru/hh/nab/consul/ConsulTagsSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.consul; 2 | 3 | import java.util.List; 4 | import java.util.function.Supplier; 5 | 6 | public interface ConsulTagsSupplier extends Supplier> { 7 | } 8 | -------------------------------------------------------------------------------- /nab-metrics/src/test/java/ru/hh/nab/metrics/TestUtils.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | class TestUtils { 4 | static Tags tagsOf(Tag... 
tags) { 5 | return new MultiTags(tags); 6 | } 7 | 8 | private TestUtils(){} 9 | } 10 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/healthcheck/HealthCheckDataSource.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.healthcheck; 2 | 3 | public interface HealthCheckDataSource { 4 | String getDataSourceName(); 5 | HealthCheck getHealthCheck(); 6 | } 7 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/KafkaConsumerFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | 4 | public interface KafkaConsumerFactory { 5 | 6 | ConsumerBuilder builder(String topicName, Class messageClass); 7 | 8 | } 9 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/healthcheck/HealthCheck.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.healthcheck; 2 | 3 | public abstract class HealthCheck extends com.codahale.metrics.health.HealthCheck { 4 | @Override 5 | public abstract Result check(); 6 | } 7 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/JpaPropertiesProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import java.util.Properties; 4 | import java.util.function.Supplier; 5 | 6 | @FunctionalInterface 7 | public interface JpaPropertiesProvider extends Supplier { 8 | } 9 | -------------------------------------------------------------------------------- /nab-consul/src/main/java/ru/hh/nab/consul/ConsulServiceException.java: 
-------------------------------------------------------------------------------- 1 | package ru.hh.nab.consul; 2 | 3 | public class ConsulServiceException extends RuntimeException { 4 | public ConsulServiceException(String message, Throwable cause) { 5 | super(message, cause); 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/ext/JdbcExtension.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.ext; 2 | 3 | import javax.sql.DataSource; 4 | 5 | @FunctionalInterface 6 | public interface JdbcExtension { 7 | 8 | DataSource wrap(DataSource dataSourceToWrap); 9 | } 10 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/producer/SerializerSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.producer; 2 | 3 | import org.apache.kafka.common.serialization.Serializer; 4 | 5 | @FunctionalInterface 6 | public interface SerializerSupplier { 7 | 8 | Serializer supply(); 9 | } 10 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | > [!IMPORTANT] 2 | > Добавь в описание PR changelog в формате: 3 | 4 | - [ ] **version change** : 5 | - [ ] **description**: 6 | - [ ] **requires_changes_in_hh** : 7 | - [ ] **instructions** : 8 | -------------------------------------------------------------------------------- /catalog-info.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: backstage.io/v1alpha1 3 | kind: Component 4 | metadata: 5 | name: nuts-and-bolts 6 | annotations: 7 | github.com/project-slug: hhru/nuts-and-bolts 8 | tags: 9 | - backend 10 | - java 11 | spec: 12 | type: 
library 13 | lifecycle: production 14 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/java/ru/hh/nab/hibernate/service/ServiceSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.service; 2 | 3 | import java.util.function.Supplier; 4 | import org.hibernate.service.Service; 5 | 6 | public interface ServiceSupplier extends Supplier { 7 | Class getClazz(); 8 | } 9 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/AckProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import org.apache.kafka.clients.consumer.Consumer; 4 | 5 | @FunctionalInterface 6 | interface AckProvider { 7 | Ack createAck(KafkaConsumer kafkaConsumer, Consumer nativeKafkaConsumer); 8 | } 9 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/jersey/resolver/variants/GenericCache.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver.variants; 2 | 3 | import java.util.function.Function; 4 | 5 | public interface GenericCache { 6 | public V computeIfAbsent(K key, Function mappingFunction); 7 | } 8 | 9 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/DeserializerSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import org.apache.kafka.common.serialization.Deserializer; 4 | 5 | @FunctionalInterface 6 | public interface DeserializerSupplier { 7 | 8 | Deserializer supplyFor(Class clazz); 9 | 10 | } 11 | -------------------------------------------------------------------------------- 
/nab-hibernate/src/main/java/ru/hh/nab/hibernate/events/EventListenerRegistryConsumer.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.events; 2 | 3 | import java.util.function.Consumer; 4 | import org.hibernate.event.service.spi.EventListenerRegistry; 5 | 6 | public interface EventListenerRegistryConsumer extends Consumer { 7 | } 8 | -------------------------------------------------------------------------------- /nab-logging/src/main/resources/shared-pattern-logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | jdk: 3 | - openjdk11 4 | script: "mvn -P codecoverage,!sonar clean test sonar:sonar" 5 | after_success: 6 | - bash <(curl -s https://codecov.io/bash) 7 | 8 | addons: 9 | sonarcloud: 10 | organization: "hhru" 11 | 12 | cache: 13 | directories: 14 | - '$HOME/.m2/repository' 15 | - '$HOME/.sonar/cache' 16 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/DataSourceType.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource; 2 | 3 | public final class DataSourceType { 4 | public static final String MASTER = "master"; 5 | public static final String READONLY = "readonly"; 6 | public static final String SLOW = "slow"; 7 | 8 | private DataSourceType() { 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /.forgejo/workflows/security_scan.yml: -------------------------------------------------------------------------------- 1 | on: [pull_request] 2 | 3 | jobs: 4 | Security_pipeline: 5 | runs-on: k8s-security-scan 6 | container: 7 | image: 
${{vars.DEVSECOPS_IMAGE_CURLJQ}} 8 | steps: 9 | - uses: https://${{secrets.DEVSECOPS_FORGEJO_API_TOKEN}}@forgejo.pyn.ru/hhru/devsecops-security-scan/multiaction/@master -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/http/RequestInfo.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.http; 2 | 3 | public final class RequestInfo { 4 | public static final String CACHE_ATTRIBUTE = "HttpCache"; 5 | public static final String HIT = "HIT"; 6 | public static final String MISS = "MISS"; 7 | public static final String NO_CACHE = "-"; 8 | 9 | private RequestInfo() {} 10 | } 11 | -------------------------------------------------------------------------------- /nab-testbase/src/main/java/ru/hh/nab/testbase/hibernate/HibernateTestBase.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.hibernate; 2 | 3 | import jakarta.inject.Inject; 4 | import org.hibernate.Session; 5 | import ru.hh.nab.testbase.transaction.TransactionTestBase; 6 | 7 | public abstract class HibernateTestBase extends TransactionTestBase { 8 | @Inject 9 | protected Session session; 10 | } 11 | -------------------------------------------------------------------------------- /nab-testbase/src/main/java/ru/hh/nab/testbase/jpa/JpaTestBase.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.jpa; 2 | 3 | import jakarta.inject.Inject; 4 | import jakarta.persistence.EntityManager; 5 | import ru.hh.nab.testbase.transaction.TransactionTestBase; 6 | 7 | public abstract class JpaTestBase extends TransactionTestBase { 8 | @Inject 9 | protected EntityManager entityManager; 10 | } 11 | -------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/constants/RequestAttributes.java: 
-------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.constants; 2 | 3 | public class RequestAttributes { 4 | 5 | public static final String CODE_FUNCTION = "codeFunction"; 6 | public static final String CODE_NAMESPACE = "codeNamespace"; 7 | public static final String HTTP_ROUTE = "httpRoute"; 8 | 9 | private RequestAttributes() {} 10 | } 11 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/monitoring/MetricsTrackerFactoryProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.monitoring; 2 | 3 | import com.zaxxer.hikari.metrics.MetricsTrackerFactory; 4 | import java.util.Properties; 5 | 6 | @FunctionalInterface 7 | public interface MetricsTrackerFactoryProvider { 8 | T create(Properties dataSourceProperties); 9 | } 10 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/EntityManagerFactoryCreationHandler.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import jakarta.persistence.EntityManagerFactory; 4 | import jakarta.persistence.spi.PersistenceUnitInfo; 5 | import java.util.function.BiConsumer; 6 | 7 | @FunctionalInterface 8 | public interface EntityManagerFactoryCreationHandler extends BiConsumer { 9 | } 10 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/java/ru/hh/nab/hibernate/monitoring/HibernateMetrics.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.monitoring; 2 | 3 | public final class HibernateMetrics { 4 | 5 | public static final String QUERY_PLAN_CACHE_HIT_COUNT = "hibernate.query_plan_cache.hit_count"; 6 | public static final String QUERY_PLAN_CACHE_MISS_COUNT = 
"hibernate.query_plan_cache.miss_count"; 7 | 8 | private HibernateMetrics() {} 9 | } 10 | -------------------------------------------------------------------------------- /nab-testbase/src/main/java/ru/hh/nab/testbase/web/WebTestBase.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.web; 2 | 3 | import jakarta.inject.Inject; 4 | import org.junit.jupiter.api.extension.ExtendWith; 5 | import ru.hh.nab.testbase.extensions.SpringExtensionWithFailFast; 6 | 7 | @ExtendWith({SpringExtensionWithFailFast.class}) 8 | public abstract class WebTestBase { 9 | 10 | @Inject 11 | protected ResourceHelper resourceHelper; 12 | } 13 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/resolver/CharacterEscapeBase.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | public final class CharacterEscapeBase { 4 | private CharacterEscapeBase() {} 5 | 6 | static final char REPLACEMENT_CHAR = '\uFFFD'; 7 | 8 | public static boolean isInvalidTextSymbol(char c) { 9 | return (c < 0x20 && c != 0x9 && c != 0xA && c != 0xD) || (c == 0xFFFE || c == 0xFFFF); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/SimpleHistogram.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | /** 4 | * Простая реализация. 
Сохраняет значения "как есть" 5 | */ 6 | public class SimpleHistogram extends Histogram { 7 | 8 | public SimpleHistogram(int maxHistogramSize) { 9 | super(maxHistogramSize); 10 | } 11 | 12 | @Override 13 | protected int calculateValue(int value) { 14 | return value; 15 | } 16 | 17 | } 18 | 19 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/aspect/ExecuteOnDataSourceWrappedException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.aspect; 2 | 3 | import static java.util.Objects.requireNonNull; 4 | 5 | public final class ExecuteOnDataSourceWrappedException extends RuntimeException { 6 | public ExecuteOnDataSourceWrappedException(Throwable cause) { 7 | super("Checked exception from @ExecuteOnDataSource", requireNonNull(cause)); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/retry/policy/Never.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer.retry.policy; 2 | 3 | import java.time.Instant; 4 | import java.util.Optional; 5 | import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory; 6 | 7 | public record Never() implements RetryPolicy { 8 | @Override 9 | public Optional getNextRetryTime(MessageProcessingHistory history) { 10 | return Optional.empty(); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/java/ru/hh/nab/hibernate/interceptor/RequestIdPassingInterceptor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.interceptor; 2 | 3 | import org.hibernate.resource.jdbc.spi.StatementInspector; 4 | import ru.hh.nab.hibernate.util.SqlRequestIdCommenter; 5 | 6 | public class RequestIdPassingInterceptor 
implements StatementInspector { 7 | @Override 8 | public String inspect(String sql) { 9 | return SqlRequestIdCommenter.addRequestIdComment(sql); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/ListAppender.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging; 2 | 3 | import ch.qos.logback.classic.spi.ILoggingEvent; 4 | 5 | public class ListAppender extends ch.qos.logback.core.read.ListAppender { 6 | 7 | public synchronized String getLogLineBySubstring(String substring) { 8 | return list.stream().map(ILoggingEvent::getFormattedMessage).filter(m -> m.contains(substring)).findFirst().orElse(null); 9 | } 10 | } 11 | 12 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/java/ru/hh/nab/hibernate/interceptor/ControllerPassingInterceptor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.interceptor; 2 | 3 | import org.hibernate.resource.jdbc.spi.StatementInspector; 4 | import ru.hh.nab.common.mdc.MDC; 5 | 6 | public class ControllerPassingInterceptor implements StatementInspector { 7 | @Override 8 | public String inspect(String sql) { 9 | return MDC.getController().map(s -> "/* " + s.replace('*', '_') + " */" + sql).orElse(sql); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/healthcheck/UnhealthyDataSourceException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.healthcheck; 2 | 3 | import java.sql.SQLException; 4 | 5 | public class UnhealthyDataSourceException extends SQLException { 6 | 7 | private static final String ERROR_MESSAGE = "DataSource %s is unhealthy"; 8 | 9 | public UnhealthyDataSourceException(String 
dataSourceName, Throwable cause) { 10 | super(String.format(ERROR_MESSAGE, dataSourceName), cause); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /nab-testbase/src/main/java/ru/hh/nab/testbase/kafka/NoopKafkaConsumerFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.kafka; 2 | 3 | import ru.hh.nab.kafka.consumer.ConsumerBuilder; 4 | import ru.hh.nab.kafka.consumer.KafkaConsumerFactory; 5 | 6 | @SuppressWarnings("unused") 7 | public class NoopKafkaConsumerFactory implements KafkaConsumerFactory { 8 | 9 | @Override 10 | public ConsumerBuilder builder(String topicName, Class messageClass) { 11 | return new NoopConsumerBuilder<>(); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/aspect/ExecuteOnDataSourceTransactionCallbackFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.aspect; 2 | 3 | import org.aspectj.lang.ProceedingJoinPoint; 4 | import org.springframework.transaction.support.TransactionCallback; 5 | import ru.hh.nab.datasource.annotation.ExecuteOnDataSource; 6 | 7 | public interface ExecuteOnDataSourceTransactionCallbackFactory { 8 | TransactionCallback create(ProceedingJoinPoint pjp, ExecuteOnDataSource executeOnDataSource); 9 | } 10 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/RangedHistogram.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | /** 4 | * Округляет значения до степени двойки. 
Нужно для уменьшения количества значений в гистограмме 5 | */ 6 | public class RangedHistogram extends Histogram { 7 | 8 | public RangedHistogram(int maxHistogramSize) { 9 | super(maxHistogramSize); 10 | } 11 | 12 | @Override 13 | protected int calculateValue(int value) { 14 | return Integer.highestOneBit(value - 1) << 1; 15 | } 16 | 17 | } 18 | 19 | -------------------------------------------------------------------------------- /nab-testbase/src/test/java/ru/hh/nab/testbase/kafka/NoopKafkaConsumerFactoryTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.kafka; 2 | 3 | import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; 4 | import org.junit.jupiter.api.Test; 5 | 6 | public class NoopKafkaConsumerFactoryTest { 7 | 8 | @Test 9 | public void successfulBuildAndStartNoopConsumer() { 10 | assertDoesNotThrow(() -> new NoopKafkaConsumerFactory() 11 | .builder(null, null) 12 | .build() 13 | .start()); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/constants/RequestHeaders.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.constants; 2 | 3 | public final class RequestHeaders { 4 | private RequestHeaders() { 5 | } 6 | public static final String REQUEST_ID = "X-Request-Id"; 7 | public static final String EMPTY_REQUEST_ID = "noRequestId"; 8 | public static final String EMPTY_USER_AGENT = "noUserAgent"; 9 | public static final String REQUEST_SOURCE = "x-source"; 10 | public static final String LOAD_TESTING = "x-load-testing"; 11 | } 12 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/NabMappableException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import static 
java.util.Objects.requireNonNull; 4 | import org.glassfish.jersey.server.internal.process.MappableException; 5 | 6 | /** 7 | * Automatically unwrapped by jersey. See {@link MappableException}. 8 | */ 9 | public class NabMappableException extends MappableException { 10 | public NabMappableException(Throwable cause) { 11 | super("Wrapper exception for jersey mappers", requireNonNull(cause)); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/extractor/ConnectionSpanNameExtractor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.extractor; 2 | 3 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 4 | 5 | public class ConnectionSpanNameExtractor extends DataSourceNameExtractor { 6 | 7 | final static String GET_CONNECTION = "getConnection"; 8 | 9 | @Override 10 | public String extract(NabDataSourceInfo nabDataSourceInfo) { 11 | return GET_CONNECTION + " " + super.extract(nabDataSourceInfo); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/validation/DataSourcesReadyTarget.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.validation; 2 | 3 | import java.util.List; 4 | import javax.sql.DataSource; 5 | 6 | /** 7 | * Class is meant to provide a reliable point in bean's graph 8 | * where initialization performed for & by DataSources completed. 
9 | */ 10 | public class DataSourcesReadyTarget { 11 | 12 | private final List dataSources; 13 | 14 | public DataSourcesReadyTarget(List dataSources) { 15 | this.dataSources = dataSources; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/ExceptionSerializer.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.servlet.http.HttpServletRequest; 4 | import jakarta.servlet.http.HttpServletResponse; 5 | import jakarta.ws.rs.core.Response; 6 | 7 | public interface ExceptionSerializer { 8 | /** 9 | * Returns true if serializer can be used to produce the response. 10 | */ 11 | boolean isCompatible(HttpServletRequest request, HttpServletResponse response); 12 | 13 | Response serializeException(Response.StatusType statusCode, Exception exception); 14 | } 15 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/json/NabJsonLayout.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.json; 2 | 3 | import net.logstash.logback.layout.LogstashLayout; 4 | import static ru.hh.nab.logging.json.JsonFieldNames.DEFAULT_TIMESTAMP_FORMAT; 5 | 6 | public class NabJsonLayout extends LogstashLayout { 7 | public NabJsonLayout() { 8 | super(); 9 | 10 | setFieldNames(LogstashFields.DEFAULT.getFieldNames()); 11 | setIncludeMdc(true); 12 | setIncludeContext(false); 13 | setIncludeCallerData(false); 14 | setTimestampPattern(DEFAULT_TIMESTAMP_FORMAT); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/override/SkipLogLevelOverrideException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.override; 2 | 3 | /** 4 | * 
Special exception for {@link LogLevelOverrideExtension} which forces skipping the override without any error. 5 | * It may be useful in case of handling errors on extension side without excess propagation. 6 | */ 7 | public class SkipLogLevelOverrideException extends RuntimeException { 8 | 9 | public SkipLogLevelOverrideException() { 10 | super(); 11 | } 12 | 13 | public SkipLogLevelOverrideException(String s) { 14 | super(s); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/json/NabTSOnlyJsonLayout.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.json; 2 | 3 | import net.logstash.logback.layout.LogstashLayout; 4 | import static ru.hh.nab.logging.json.JsonFieldNames.DEFAULT_TIMESTAMP_FORMAT; 5 | 6 | public class NabTSOnlyJsonLayout extends LogstashLayout { 7 | public NabTSOnlyJsonLayout() { 8 | super(); 9 | 10 | setFieldNames(LogstashFields.TS_ONLY.getFieldNames()); 11 | setIncludeMdc(false); 12 | setIncludeContext(false); 13 | setIncludeCallerData(false); 14 | setTimestampPattern(DEFAULT_TIMESTAMP_FORMAT); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/MappableExceptionUtils.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import java.util.concurrent.Callable; 4 | import java.util.concurrent.Future; 5 | 6 | public final class MappableExceptionUtils { 7 | private MappableExceptionUtils() { 8 | } 9 | 10 | public static T getOrThrowMappable(Callable supplier) { 11 | return ru.hh.nab.common.util.ExceptionUtils.getOrThrow(supplier, NabMappableException::new); 12 | } 13 | 14 | public static T getOrThrowMappable(Future future) { 15 | return getOrThrowMappable(future::get); 16 | } 17 | } 18 | 
-------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/qualifier/NamedQualifier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.qualifier; 2 | 3 | public final class NamedQualifier { 4 | 5 | public static final String SERVICE_NAME = "serviceName"; 6 | public static final String SERVICE_VERSION = "serviceVersion"; 7 | public static final String NODE_NAME = "nodeName"; 8 | public static final String DATACENTER = "datacenter"; 9 | public static final String DATACENTERS = "datacenters"; 10 | public static final String DEFAULT_HTTP_CLIENT_CONTEXT_SUPPLIER = "defaultHttpClientContextSupplier"; 11 | 12 | private NamedQualifier() {} 13 | } 14 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/resolver/ObjectMapperContextResolver.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import jakarta.ws.rs.ext.ContextResolver; 5 | 6 | public class ObjectMapperContextResolver implements ContextResolver { 7 | public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); 8 | static { 9 | OBJECT_MAPPER.getFactory().setCharacterEscapes(new JsonCharacterEscapes()); 10 | } 11 | 12 | @Override 13 | public ObjectMapper getContext(Class type) { 14 | return OBJECT_MAPPER; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/json/JsonFieldNames.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.json; 2 | 3 | public final class JsonFieldNames { 4 | public static final String TIMESTAMP = "ts"; 5 | public static final String LEVEL = "lvl"; 6 | public static final String MESSAGE = 
"msg"; 7 | public static final String LOGGER = "logger"; 8 | public static final String EXCEPTION = "exception"; 9 | public static final String MDC = "mdc"; 10 | public static final String APPENDER = "appender"; 11 | 12 | public static final String DEFAULT_TIMESTAMP_FORMAT = "yyyy-MM-dd' 'HH:mm:ss.SSSZ"; 13 | 14 | private JsonFieldNames() {} 15 | } 16 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/ext/OpenTelemetryJdbcExtension.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.ext; 2 | 3 | /** 4 | *

Extension provides the ability to decorate DataSource with wrapper sending OpenTelemetry spans. 5 | * This extension used if you create DataSource via {@link ru.hh.nab.datasource.DataSourceFactory}. 6 | * Pass implementation to {@link ru.hh.nab.datasource.DataSourceFactory} as a parameter instead of null to activate it.

7 | * 8 | *

Default implementation placed in nab-telemetry-jdbc module.

9 | */ 10 | @FunctionalInterface 11 | public interface OpenTelemetryJdbcExtension extends JdbcExtension { 12 | } 13 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/filter/cache/Serializer.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.filter.cache; 2 | 3 | import java.nio.ByteBuffer; 4 | import org.caffinitas.ohc.CacheSerializer; 5 | 6 | public class Serializer implements CacheSerializer { 7 | @Override 8 | public void serialize(byte[] value, ByteBuffer buf) { 9 | buf.put(value); 10 | } 11 | 12 | @Override 13 | public byte[] deserialize(ByteBuffer buf) { 14 | byte[] data = new byte[buf.remaining()]; 15 | buf.get(data); 16 | return data; 17 | } 18 | 19 | @Override 20 | public int serializedSize(byte[] value) { 21 | return value.length; 22 | } 23 | } 24 | 25 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/SeekPosition.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import org.springframework.kafka.support.TopicPartitionOffset; 4 | 5 | public enum SeekPosition { 6 | 7 | EARLIEST(TopicPartitionOffset.SeekPosition.BEGINNING), 8 | 9 | LATEST(TopicPartitionOffset.SeekPosition.END); 10 | 11 | private final TopicPartitionOffset.SeekPosition springKafkaSeekPosition; 12 | 13 | SeekPosition(TopicPartitionOffset.SeekPosition springKafkaSeekPosition) { 14 | this.springKafkaSeekPosition = springKafkaSeekPosition; 15 | } 16 | 17 | public TopicPartitionOffset.SeekPosition getSpringSeekPosition() { 18 | return springKafkaSeekPosition; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /nab-telemetry/src/main/java/ru/hh/nab/telemetry/TelemetryProcessorFactory.java: 
-------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry; 2 | 3 | import io.opentelemetry.api.trace.Tracer; 4 | import ru.hh.jclient.common.HttpClientEventListener; 5 | 6 | public class TelemetryProcessorFactory { 7 | private final Tracer tracer; 8 | private final TelemetryPropagator telemetryPropagator; 9 | 10 | public TelemetryProcessorFactory(Tracer tracer, TelemetryPropagator telemetryPropagator) { 11 | this.tracer = tracer; 12 | this.telemetryPropagator = telemetryPropagator; 13 | } 14 | 15 | public HttpClientEventListener createHttpClientEventListener() { 16 | return new TelemetryListenerImpl(tracer, telemetryPropagator); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/getter/ConnectionCodeAttributesGetter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.getter; 2 | 3 | import io.opentelemetry.instrumentation.api.instrumenter.code.CodeAttributesGetter; 4 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 5 | 6 | public class ConnectionCodeAttributesGetter implements CodeAttributesGetter { 7 | 8 | @Override 9 | public Class getCodeClass(NabDataSourceInfo nabDataSourceInfo) { 10 | return nabDataSourceInfo.getDataSource().getClass(); 11 | } 12 | 13 | @Override 14 | public String getMethodName(NabDataSourceInfo nabDataSourceInfo) { 15 | return "getConnection"; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/NabTelemetryJdbcExtension.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc; 2 | 3 | import javax.sql.DataSource; 4 | import ru.hh.nab.datasource.ext.OpenTelemetryJdbcExtension; 5 | 6 | public class 
NabTelemetryJdbcExtension implements OpenTelemetryJdbcExtension { 7 | 8 | private final NabTelemetryDataSourceFactory nabTelemetryDataSourceFactory; 9 | 10 | public NabTelemetryJdbcExtension(NabTelemetryDataSourceFactory nabTelemetryDataSourceFactory) { 11 | this.nabTelemetryDataSourceFactory = nabTelemetryDataSourceFactory; 12 | } 13 | 14 | @Override 15 | public DataSource wrap(DataSource dataSourceToWrap) { 16 | return nabTelemetryDataSourceFactory.wrap(dataSourceToWrap); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/EntityManagerFactoryId.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | 9 | /** 10 | * This annotation is necessary in order to be able to identify the EntityManagerFactory in case of using multiple factories (for example it can be 11 | * useful while sending metrics) 12 | * All {@link org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean} beans should be marked with this annotation 13 | */ 14 | @Retention(RetentionPolicy.RUNTIME) 15 | @Target(ElementType.METHOD) 16 | public @interface EntityManagerFactoryId { 17 | String value(); 18 | } 19 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/getter/ExtendedNameJdbcAttributesGetter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.getter; 2 | 3 | import jakarta.annotation.Nullable; 4 | import ru.hh.nab.telemetry.jdbc.internal.extractor.DataSourceNameExtractor; 5 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDbRequest; 6 | 7 | public class 
ExtendedNameJdbcAttributesGetter extends NabJdbcAttributesGetter { 8 | 9 | private static final DataSourceNameExtractor dataSourceNameExtractor = new DataSourceNameExtractor(); 10 | 11 | @Nullable 12 | @Override 13 | public String getName(NabDbRequest request) { 14 | return dataSourceNameExtractor.extract(request.getNabDataSourceInfo()) + " " + super.getName(request); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/transaction/TransactionalScope.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.transaction; 2 | 3 | import java.util.function.Supplier; 4 | import org.springframework.transaction.annotation.Transactional; 5 | 6 | public class TransactionalScope { 7 | 8 | @Transactional(readOnly = true) 9 | public T read(Supplier supplier) { 10 | return supplier.get(); 11 | } 12 | 13 | @Transactional(readOnly = true) 14 | public void read(Runnable runnable) { 15 | runnable.run(); 16 | } 17 | 18 | @Transactional 19 | public T write(Supplier supplier) { 20 | return supplier.get(); 21 | } 22 | 23 | @Transactional 24 | public void write(Runnable runnable) { 25 | runnable.run(); 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/NabPersistenceUnitPostProcessor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import java.util.List; 4 | import org.springframework.orm.jpa.persistenceunit.MutablePersistenceUnitInfo; 5 | import org.springframework.orm.jpa.persistenceunit.PersistenceUnitPostProcessor; 6 | 7 | public class NabPersistenceUnitPostProcessor implements PersistenceUnitPostProcessor { 8 | 9 | private final List managedClassNames; 10 | 11 | public NabPersistenceUnitPostProcessor(List managedClassNames) { 12 | this.managedClassNames = 
managedClassNames; 13 | } 14 | 15 | @Override 16 | public void postProcessPersistenceUnitInfo(MutablePersistenceUnitInfo pui) { 17 | managedClassNames.forEach(pui::addManagedClassName); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/retry/policy/Deadline.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer.retry.policy; 2 | 3 | import java.time.Instant; 4 | import java.util.Objects; 5 | import java.util.Optional; 6 | import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory; 7 | 8 | public record Deadline(RetryPolicy base, Instant deadline) implements RetryPolicy { 9 | public Deadline(RetryPolicy base, Instant deadline) { 10 | this.base = Objects.requireNonNull(base); 11 | this.deadline = Objects.requireNonNull(deadline); 12 | } 13 | 14 | @Override 15 | public Optional getNextRetryTime(MessageProcessingHistory history) { 16 | return base 17 | .getNextRetryTime(history) 18 | .filter(deadline::isAfter); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /nab-telemetry/src/main/java/ru/hh/nab/telemetry/IdGeneratorImpl.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry; 2 | 3 | import io.opentelemetry.sdk.trace.IdGenerator; 4 | import ru.hh.trace.TraceContextUnsafe; 5 | import ru.hh.trace.TraceIdGenerator; 6 | 7 | public class IdGeneratorImpl implements IdGenerator { 8 | 9 | private final TraceContextUnsafe traceContext; 10 | 11 | public IdGeneratorImpl(TraceContextUnsafe traceContext) { 12 | this.traceContext = traceContext; 13 | } 14 | 15 | @Override 16 | public String generateSpanId() { 17 | return IdGenerator.random().generateSpanId(); 18 | } 19 | 20 | @Override 21 | public String generateTraceId() { 22 | return 
traceContext.getStrictTraceId().orElseGet(TraceIdGenerator::generateTraceId); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/exception/NabKafkaException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.exception; 2 | 3 | /** 4 | * The base class of all other nab kafka exceptions 5 | */ 6 | public class NabKafkaException extends RuntimeException { 7 | public NabKafkaException(String message) { 8 | super(message); 9 | } 10 | 11 | public NabKafkaException(String message, Throwable cause) { 12 | super(message, cause); 13 | } 14 | 15 | public NabKafkaException(Throwable cause) { 16 | super(cause); 17 | } 18 | 19 | public NabKafkaException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { 20 | super(message, cause, enableSuppression, writableStackTrace); 21 | } 22 | 23 | public NabKafkaException() { 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/model/NabDbInfo.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.model; 2 | 3 | import io.opentelemetry.instrumentation.jdbc.internal.dbinfo.DbInfo; 4 | 5 | public class NabDbInfo { 6 | 7 | private DbInfo dbInfo; 8 | private NabDataSourceInfo nabDataSourceInfo; 9 | 10 | public DbInfo getDbInfo() { 11 | return dbInfo; 12 | } 13 | 14 | public NabDbInfo setDbInfo(DbInfo dbInfo) { 15 | this.dbInfo = dbInfo; 16 | return this; 17 | } 18 | 19 | public NabDataSourceInfo getNabDataSourceInfo() { 20 | return nabDataSourceInfo; 21 | } 22 | 23 | public NabDbInfo setNabDataSourceInfo(NabDataSourceInfo nabDataSourceInfo) { 24 | this.nabDataSourceInfo = nabDataSourceInfo; 25 | return this; 26 | } 27 | } 28 | 
-------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/serialization/JacksonDeserializerSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.serialization; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import org.apache.kafka.common.serialization.Deserializer; 5 | import org.springframework.kafka.support.serializer.JsonDeserializer; 6 | import ru.hh.nab.kafka.consumer.DeserializerSupplier; 7 | 8 | public class JacksonDeserializerSupplier implements DeserializerSupplier { 9 | 10 | private final ObjectMapper objectMapper; 11 | 12 | public JacksonDeserializerSupplier(ObjectMapper objectMapper) { 13 | this.objectMapper = objectMapper; 14 | } 15 | 16 | @Override 17 | public Deserializer supplyFor(Class clazz) { 18 | return new JsonDeserializer<>(clazz, objectMapper, false); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /nab-telemetry/src/test/java/ru/hh/nab/telemetry/TestResource.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry; 2 | 3 | import jakarta.ws.rs.DefaultValue; 4 | import jakarta.ws.rs.GET; 5 | import jakarta.ws.rs.Path; 6 | import jakarta.ws.rs.PathParam; 7 | 8 | @Path("/") 9 | public class TestResource { 10 | @GET 11 | public String root() { 12 | return "root"; 13 | } 14 | @GET 15 | @Path("/simple") 16 | public String simple() { 17 | return "Hello, world!"; 18 | } 19 | 20 | @GET 21 | @Path("/simple/{name}/greeting") 22 | public String simpleWithParam(@PathParam("name") @DefaultValue("world") String name) { 23 | return "Hello, %s!".formatted(name); 24 | } 25 | 26 | @GET 27 | @Path("/error") 28 | public String error() { 29 | throw new RuntimeException("Error description!"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- 
/nab-hibernate/src/main/java/ru/hh/nab/hibernate/adapter/NabHibernateJpaVendorAdapter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.adapter; 2 | 3 | import jakarta.persistence.spi.PersistenceProvider; 4 | import org.hibernate.jpa.HibernatePersistenceProvider; 5 | import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; 6 | 7 | public class NabHibernateJpaVendorAdapter extends HibernateJpaVendorAdapter { 8 | 9 | private final HibernatePersistenceProvider hibernatePersistenceProvider; 10 | 11 | public NabHibernateJpaVendorAdapter(HibernatePersistenceProvider hibernatePersistenceProvider) { 12 | this.hibernatePersistenceProvider = hibernatePersistenceProvider; 13 | } 14 | 15 | @Override 16 | public PersistenceProvider getPersistenceProvider() { 17 | return hibernatePersistenceProvider; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/retry/RetryPolicyResolver.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer.retry; 2 | 3 | import java.util.function.BiFunction; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import ru.hh.nab.kafka.consumer.retry.policy.RetryPolicy; 6 | 7 | /** 8 | * Decides what {@link RetryPolicy} to use for concrete message and processing error 9 | */ 10 | @FunctionalInterface 11 | public interface RetryPolicyResolver extends BiFunction, Throwable, RetryPolicy> { 12 | 13 | static RetryPolicyResolver never() { 14 | return (consumerRecord, throwable) -> RetryPolicy.never(); 15 | } 16 | 17 | static RetryPolicyResolver always(RetryPolicy retryPolicy) { 18 | return (consumerRecord, throwable) -> retryPolicy; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- 
/nab-data-source/src/main/java/ru/hh/nab/datasource/monitoring/NabMetricsTrackerFactoryProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.monitoring; 2 | 3 | import java.util.Properties; 4 | import ru.hh.nab.metrics.StatsDSender; 5 | 6 | public class NabMetricsTrackerFactoryProvider implements MetricsTrackerFactoryProvider { 7 | private final String serviceName; 8 | private final StatsDSender statsDSender; 9 | 10 | public NabMetricsTrackerFactoryProvider(String serviceName, StatsDSender statsDSender) { 11 | this.serviceName = serviceName; 12 | this.statsDSender = statsDSender; 13 | } 14 | 15 | @Override 16 | public NabMetricsTrackerFactory create(Properties dataSourceProperties) { 17 | return new NabMetricsTrackerFactory(serviceName, statsDSender, dataSourceProperties); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/serialization/JacksonSerializerSupplier.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.serialization; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import org.apache.kafka.common.serialization.Serializer; 5 | import org.springframework.kafka.support.serializer.JsonSerializer; 6 | import ru.hh.nab.kafka.producer.SerializerSupplier; 7 | 8 | public class JacksonSerializerSupplier implements SerializerSupplier { 9 | 10 | private final ObjectMapper objectMapper; 11 | 12 | public JacksonSerializerSupplier(ObjectMapper objectMapper) { 13 | this.objectMapper = objectMapper; 14 | } 15 | 16 | public Serializer supply() { 17 | JsonSerializer serializer = new JsonSerializer<>(objectMapper); 18 | serializer.setAddTypeInfo(false); 19 | return serializer; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- 
/nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/model/NabDbRequest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.model; 2 | 3 | import io.opentelemetry.instrumentation.jdbc.internal.DbRequest; 4 | 5 | public class NabDbRequest { 6 | 7 | private DbRequest dbRequest; 8 | private NabDataSourceInfo nabDataSourceInfo; 9 | 10 | public DbRequest getDbRequest() { 11 | return dbRequest; 12 | } 13 | 14 | public NabDbRequest setDbRequest(DbRequest dbRequest) { 15 | this.dbRequest = dbRequest; 16 | return this; 17 | } 18 | 19 | public NabDataSourceInfo getNabDataSourceInfo() { 20 | return nabDataSourceInfo; 21 | } 22 | 23 | public NabDbRequest setNabDataSourceInfo(NabDataSourceInfo nabDataSourceInfo) { 24 | this.nabDataSourceInfo = nabDataSourceInfo; 25 | return this; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/mdc/MDCTraceContextListener.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.mdc; 2 | 3 | import static java.util.Optional.ofNullable; 4 | import ru.hh.trace.Scope; 5 | import ru.hh.trace.TraceContextListener; 6 | 7 | public class MDCTraceContextListener implements TraceContextListener { 8 | 9 | @Override 10 | public Scope onTraceStart(String traceId, String strictTraceId) { 11 | String previousTraceId = MDC.getRequestId().orElse(null); 12 | MDC.setRequestId(traceId); 13 | return new MDCTraceContextListenerScope(previousTraceId); 14 | } 15 | 16 | private record MDCTraceContextListenerScope(String previousTraceId) implements Scope { 17 | @Override 18 | public void close() { 19 | ofNullable(previousTraceId).ifPresentOrElse(MDC::setRequestId, MDC::clearRequestId); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- 
package ru.hh.nab.kafka.consumer.retry.policy;

import java.time.Duration;
import java.time.Instant;
import java.util.Optional;
import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory;

/**
 * Retry policy that always waits the same {@code delay} after each failure.
 * The delay must be strictly positive.
 */
public record Fixed(Duration delay) implements RetryPolicy {

  public Fixed {
    // Zero and negative delays are rejected up front; the two conditions are disjoint,
    // so checking order does not matter.
    if (delay.isZero()) {
      throw new IllegalArgumentException("Explicitly use Never policy instead of zero delay");
    }
    if (delay.isNegative()) {
      throw new IllegalArgumentException("Delay should be positive");
    }
  }

  /** Next attempt time is simply the last failure time shifted by the fixed delay. */
  @Override
  public Optional<Instant> getNextRetryTime(MessageProcessingHistory history) {
    Instant lastFail = history.lastFailTime();
    return Optional.of(lastFail.plus(delay));
  }
}
package ru.hh.nab.datasource;

import java.sql.SQLException;
import java.util.Optional;
import javax.sql.DataSource;

/**
 * Wrapper for DataSource which holds unique {@link #getName() name}.
 */
public class NamedDataSource extends DelegatingDataSource {

  private final String name;

  public NamedDataSource(String name, DataSource delegate) {
    super(delegate);
    this.name = name;
  }

  /**
   * Extracts the name of the {@link NamedDataSource} wrapped (possibly transitively) by the given
   * data source.
   *
   * @return the name, or {@link Optional#empty()} when the data source cannot be unwrapped
   */
  public static Optional<String> getName(DataSource dataSource) {
    try {
      NamedDataSource named = dataSource.unwrap(NamedDataSource.class);
      return Optional.of(named.getName());
    } catch (SQLException e) {
      // unwrap() signals "not a NamedDataSource" via SQLException — treat as absent, not as an error
      return Optional.empty();
    }
  }

  /** @return the unique name assigned to this data source */
  public String getName() {
    return name;
  }
}
"Expires"; 8 | 9 | private CacheUtils() {} 10 | 11 | public static void applyCache(HttpServletResponse response, int seconds) { 12 | if (seconds > 0) { 13 | response.setDateHeader(EXPIRES, System.currentTimeMillis() + seconds * 1000L); 14 | response.setHeader(CACHE_CONTROL, "max-age=" + seconds); 15 | } else { 16 | noCache(response); 17 | } 18 | } 19 | 20 | public static void noCache(HttpServletResponse response) { 21 | response.setDateHeader(EXPIRES, 1L); 22 | response.setHeader(CACHE_CONTROL, "must-revalidate,no-cache,no-store"); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/json/NabJsonEncoder.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.json; 2 | 3 | import net.logstash.logback.encoder.LogstashEncoder; 4 | import static ru.hh.nab.logging.json.JsonFieldNames.DEFAULT_TIMESTAMP_FORMAT; 5 | 6 | public class NabJsonEncoder extends LogstashEncoder { 7 | 8 | public NabJsonEncoder() { 9 | this("undefined-appender-name", false); 10 | } 11 | 12 | public NabJsonEncoder(String appenderName, boolean includeAppenderName) { 13 | super(); 14 | 15 | setFieldNames(LogstashFields.DEFAULT.getFieldNames()); 16 | setIncludeMdc(true); 17 | setIncludeContext(false); 18 | setIncludeCallerData(false); 19 | setTimestampPattern(DEFAULT_TIMESTAMP_FORMAT); 20 | setLineSeparator("\n"); 21 | 22 | if (includeAppenderName) { 23 | addProvider(new AppenderNameJsonProvider(appenderName)); 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/exception/ConfigurationException.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.exception; 2 | 3 | /** 4 | * Any invalid configuration that is part of the public API must be a subclass of this class and be part of this 5 | * package. 
6 | */ 7 | public class ConfigurationException extends NabKafkaException { 8 | public ConfigurationException(String message) { 9 | super(message); 10 | } 11 | 12 | public ConfigurationException(String message, Throwable cause) { 13 | super(message, cause); 14 | } 15 | 16 | public ConfigurationException(Throwable cause) { 17 | super(cause); 18 | } 19 | 20 | public ConfigurationException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { 21 | super(message, cause, enableSuppression, writableStackTrace); 22 | } 23 | 24 | public ConfigurationException() { 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-kafka/src/test/java/ru/hh/nab/kafka/consumer/TopicConsumerMock.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | import java.util.stream.Collectors; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import static org.junit.jupiter.api.Assertions.assertEquals; 8 | 9 | public class TopicConsumerMock implements ConsumeStrategy { 10 | 11 | private List receivedBatch = new ArrayList<>(); 12 | 13 | @Override 14 | public void onMessagesBatch(List> messages, Ack ack) { 15 | receivedBatch = messages 16 | .stream() 17 | .map(ConsumerRecord::value) 18 | .collect(Collectors.toList()); 19 | 20 | ack.acknowledge(); 21 | } 22 | 23 | void assertMessagesEquals(List expectedMessages) { 24 | assertEquals(expectedMessages, receivedBatch); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/NabPriorities.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey; 2 | 3 | import jakarta.ws.rs.Priorities; 4 | import ru.hh.nab.common.servlet.ServletFilterPriorities; 5 | 6 | /** 7 | * Nab-specific 
priority constants 8 | * Extend {@link jakarta.ws.rs.Priorities} with custom priority levels 9 | *

10 | * Jax-rs filters are invoked after servlet filters (see servlet filter priorities in {@link ServletFilterPriorities}) 11 | */ 12 | public class NabPriorities { 13 | 14 | /** 15 | * Observability filter/interceptor priority. 16 | */ 17 | public static final int OBSERVABILITY = 500; 18 | 19 | /** 20 | * Cache filter/interceptor priority. 21 | */ 22 | public static final int CACHE = 750; 23 | 24 | /** 25 | * Filter/interceptor priority for post user stage. 26 | */ 27 | public static final int LOW_PRIORITY = Priorities.USER + 1; 28 | 29 | } 30 | -------------------------------------------------------------------------------- /nab-kafka/src/test/java/ru/hh/nab/kafka/consumer/retry/RetryTopicsTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer.retry; 2 | 3 | import static org.junit.jupiter.api.Assertions.assertEquals; 4 | import org.junit.jupiter.api.Test; 5 | import ru.hh.nab.kafka.consumer.ConsumerMetadata; 6 | 7 | class RetryTopicsTest { 8 | 9 | static final ConsumerMetadata CONSUMER_METADATA = new ConsumerMetadata( 10 | "service-with-retries", 11 | "mainTopic", 12 | "FizzBuzzProcessing" 13 | ); 14 | 15 | @Test 16 | void defaultRetrySendTopic() { 17 | assertEquals("maintopic_service_with_retries_fizzbuzzprocessing_retry_send", RetryTopics.defaultRetrySendTopic(CONSUMER_METADATA)); 18 | } 19 | 20 | @Test 21 | void defaultRetryReceiveTopic() { 22 | assertEquals("maintopic_service_with_retries_fizzbuzzprocessing_retry_receive", RetryTopics.defaultRetryReceiveTopic(CONSUMER_METADATA)); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /nab-sentry/src/main/java/ru/hh/nab/sentry/SentryTraceContextListener.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.sentry; 2 | 3 | import static java.util.Optional.ofNullable; 4 | import ru.hh.trace.Scope; 5 | import 
ru.hh.trace.TraceContextListener; 6 | 7 | public class SentryTraceContextListener implements TraceContextListener { 8 | 9 | @Override 10 | public Scope onTraceStart(String traceId, String strictTraceId) { 11 | String previousTraceId = SentryScopeConfigurator.getTraceId().orElse(null); 12 | SentryScopeConfigurator.setTraceId(strictTraceId); 13 | return new SentryTraceContextListenerScope(previousTraceId); 14 | } 15 | 16 | private record SentryTraceContextListenerScope(String previousTraceId) implements Scope { 17 | @Override 18 | public void close() { 19 | ofNullable(previousTraceId).ifPresentOrElse(SentryScopeConfigurator::setTraceId, SentryScopeConfigurator::clearTraceId); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/NabWebTestLogbackBaseConfigurator.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web; 2 | 3 | import ch.qos.logback.classic.Level; 4 | import java.util.Properties; 5 | import ru.hh.nab.logging.NabLoggingConfiguratorTemplate; 6 | 7 | public class NabWebTestLogbackBaseConfigurator extends NabLoggingConfiguratorTemplate { 8 | 9 | @Override 10 | protected Properties createLoggingProperties() { 11 | Properties properties = new Properties(); 12 | setPropertyIfNotSet(properties, "log.pattern", "[%date{ISO8601}] %-5level %logger{36}:%line mdc={%mdc} - %msg%n"); 13 | setPropertyIfNotSet(properties, "log.dir", "logs"); 14 | setPropertyIfNotSet(properties, "log.immediate.flush", Boolean.TRUE.toString()); 15 | return properties; 16 | } 17 | 18 | @Override 19 | public void configure(LoggingContextWrapper context) { 20 | var rootLogger = getRootLogger(context); 21 | rootLogger.setLevel(Level.ERROR); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/retry/policy/Progressive.java: 
package ru.hh.nab.kafka.consumer.retry.policy;

import java.time.Duration;
import java.time.Instant;
import java.util.Objects;
import java.util.Optional;
import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory;

/**
 * Retry policy whose delay depends on the retry attempt number, as computed by the supplied
 * {@link DelayByRetryNumber} function.
 */
public record Progressive(DelayByRetryNumber delayByRetryNumber) implements RetryPolicy {

  public Progressive {
    // Compact constructor: reject a null delay function at construction time.
    Objects.requireNonNull(delayByRetryNumber);
  }

  @Override
  public Optional<Instant> getNextRetryTime(MessageProcessingHistory history) {
    Duration delay = delayByRetryNumber.getDelay(history.retryNumber());
    return Optional.of(history.lastFailTime().plus(delay));
  }

  /** Maps a retry attempt number to the delay before that attempt. */
  @FunctionalInterface
  public interface DelayByRetryNumber {
    Duration getDelay(long retryNumber);
  }
}
/nab-kafka/src/test/java/ru/hh/nab/kafka/consumer/ClusterMetadataProviderTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import java.util.concurrent.ExecutionException; 4 | import static org.junit.jupiter.api.Assertions.assertEquals; 5 | import org.junit.jupiter.api.Test; 6 | 7 | public class ClusterMetadataProviderTest extends KafkaConsumerTestBase { 8 | 9 | 10 | @Test 11 | public void testGetPartitionsCountByClusterMetadataProvider() throws InterruptedException, ExecutionException { 12 | DefaultConsumerFactory defaultConsumerFactory = (DefaultConsumerFactory) consumerFactory; 13 | ClusterMetadataProvider clusterMetadataProvider = new ClusterMetadataProvider(defaultConsumerFactory); 14 | 15 | assertEquals(5, clusterMetadataProvider.getPartitionsInfo(topicName).size()); 16 | 17 | addPartitions(topicName, 7); 18 | 19 | waitUntil(() -> assertEquals(7, clusterMetadataProvider.getPartitionsInfo(topicName).size())); 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /nab-metrics/src/test/java/ru/hh/nab/metrics/RangedHistogramTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | import java.util.Map; 4 | import static org.junit.jupiter.api.Assertions.assertEquals; 5 | import org.junit.jupiter.api.Test; 6 | 7 | public class RangedHistogramTest { 8 | 9 | @Test 10 | public void testSave() { 11 | RangedHistogram histogram = new RangedHistogram(128); 12 | // 0 -> 0 13 | histogram.save(0); 14 | // 4 -> 4 15 | histogram.save(4); 16 | // 6 -> 8 17 | histogram.save(6); 18 | // 7 -> 8 19 | histogram.save(7); 20 | // 8 -> 8 21 | histogram.save(8); 22 | // 100 -> 128 23 | histogram.save(100); 24 | 25 | Map values = histogram.getValueToCountAndReset(); 26 | assertEquals(4, values.size()); 27 | assertEquals(values.get(0), 1); 28 | assertEquals(values.get(4), 1); 29 | 
assertEquals(values.get(8), 3); 30 | assertEquals(values.get(128), 1); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/retry/policy/RetryLimit.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer.retry.policy; 2 | 3 | import java.time.Instant; 4 | import java.util.Objects; 5 | import java.util.Optional; 6 | import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory; 7 | 8 | public record RetryLimit(RetryPolicy base, long limit) implements RetryPolicy { 9 | public RetryLimit(RetryPolicy base, long limit) { 10 | this.base = Objects.requireNonNull(base); 11 | if (limit < 0) { 12 | throw new IllegalArgumentException("Limit should be positive"); 13 | } else if (limit == 0) { 14 | throw new IllegalArgumentException("Explicitly use Never policy instead of limit 0"); 15 | } 16 | this.limit = limit; 17 | } 18 | 19 | @Override 20 | public Optional getNextRetryTime(MessageProcessingHistory history) { 21 | return history.retryNumber() > limit ? Optional.empty() : base.getNextRetryTime(history); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/MappingConfig.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import java.util.Arrays; 4 | import java.util.LinkedHashSet; 5 | import java.util.Set; 6 | 7 | public final class MappingConfig { 8 | private final Set> annotatedClasses = new LinkedHashSet<>(); 9 | private final Set packagesToScan = new LinkedHashSet<>(); 10 | 11 | public MappingConfig(Class... entityClasses) { 12 | annotatedClasses.addAll(Arrays.asList(entityClasses)); 13 | } 14 | 15 | public void addEntityClass(Class entityClass) { 16 | annotatedClasses.add(entityClass); 17 | } 18 | 19 | public void addPackagesToScan(String... 
packageNames) { 20 | packagesToScan.addAll(Arrays.asList(packageNames)); 21 | } 22 | 23 | public Class[] getAnnotatedClasses() { 24 | return annotatedClasses.toArray(new Class[0]); 25 | } 26 | 27 | public String[] getPackagesToScan() { 28 | return packagesToScan.toArray(new String[0]); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/ExecutionExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.ws.rs.core.MediaType; 5 | import java.util.concurrent.ExecutionException; 6 | import org.eclipse.microprofile.openapi.annotations.media.Content; 7 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 8 | import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 9 | import ru.hh.errors.common.Errors; 10 | import static ru.hh.nab.web.jersey.NabPriorities.LOW_PRIORITY; 11 | 12 | @Priority(LOW_PRIORITY) 13 | @APIResponse( 14 | responseCode = "500", 15 | description = "Internal Server Error", 16 | content = @Content( 17 | mediaType = MediaType.APPLICATION_JSON, 18 | schema = @Schema( 19 | implementation = Errors.class 20 | ) 21 | ) 22 | ) 23 | public class ExecutionExceptionMapper extends UnwrappingExceptionMapper { 24 | } 25 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/CompletionExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.ws.rs.core.MediaType; 5 | import java.util.concurrent.CompletionException; 6 | import org.eclipse.microprofile.openapi.annotations.media.Content; 7 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 8 | import 
org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 9 | import ru.hh.errors.common.Errors; 10 | import static ru.hh.nab.web.jersey.NabPriorities.LOW_PRIORITY; 11 | 12 | @Priority(LOW_PRIORITY) 13 | @APIResponse( 14 | responseCode = "500", 15 | description = "Internal Server Error", 16 | content = @Content( 17 | mediaType = MediaType.APPLICATION_JSON, 18 | schema = @Schema( 19 | implementation = Errors.class 20 | ) 21 | ) 22 | ) 23 | public class CompletionExceptionMapper extends UnwrappingExceptionMapper { 24 | } 25 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/producer/KafkaSendResult.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.producer; 2 | 3 | import org.apache.kafka.clients.producer.ProducerRecord; 4 | import org.apache.kafka.clients.producer.RecordMetadata; 5 | 6 | public class KafkaSendResult { 7 | 8 | private final ProducerRecord producerRecord; 9 | private final RecordMetadata recordMetadata; 10 | 11 | public KafkaSendResult(ProducerRecord producerRecord, RecordMetadata recordMetadata) { 12 | this.producerRecord = producerRecord; 13 | this.recordMetadata = recordMetadata; 14 | } 15 | 16 | public ProducerRecord getProducerRecord() { 17 | return this.producerRecord; 18 | } 19 | 20 | public RecordMetadata getRecordMetadata() { 21 | return this.recordMetadata; 22 | } 23 | 24 | @Override 25 | public String toString() { 26 | return '{' + 27 | "producerRecord: " + producerRecord + 28 | ", recordMetadata: " + recordMetadata + 29 | '}'; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/MultiTags.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | import java.util.Arrays; 4 | import java.util.Comparator; 5 | 6 | class MultiTags extends 
package ru.hh.nab.metrics;

import java.util.Arrays;
import java.util.Comparator;

/**
 * A {@link Tags} implementation backed by several {@link Tag}s. Tags are kept sorted by name so
 * that two instances with the same tags in any order compare equal and hash identically.
 */
class MultiTags extends Tags {

  private final Tag[] tags;

  MultiTags(Tag[] tags) {
    // Bug fix: copy before sorting so the caller's array is not mutated as a
    // side effect of constructing this object.
    Tag[] sorted = tags.clone();
    Arrays.sort(sorted, Comparator.comparing(tag -> tag.name));
    this.tags = sorted;
  }

  @Override
  Tag[] getTags() {
    return tags;
  }

  @Override
  public boolean equals(Object thatObject) {
    if (this == thatObject) {
      return true;
    }
    if (thatObject == null || getClass() != thatObject.getClass()) {
      return false;
    }

    MultiTags thatTags = (MultiTags) thatObject;

    // Element-wise comparison is order-insensitive for callers because both
    // arrays were sorted by tag name at construction time.
    return Arrays.equals(tags, thatTags.tags);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(tags);
  }

  @Override
  public String toString() {
    return Arrays.toString(tags);
  }
}
-------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/json/AppenderNameJsonProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.json; 2 | 3 | import ch.qos.logback.classic.spi.ILoggingEvent; 4 | import com.fasterxml.jackson.core.JsonGenerator; 5 | import java.io.IOException; 6 | import net.logstash.logback.composite.AbstractFieldJsonProvider; 7 | import static ru.hh.nab.logging.json.JsonFieldNames.APPENDER; 8 | 9 | /** 10 | * JsonProvider для добавления имени appender'а в JSON лог 11 | */ 12 | public class AppenderNameJsonProvider extends AbstractFieldJsonProvider { 13 | 14 | private String appenderName; 15 | 16 | public AppenderNameJsonProvider(String appenderName) { 17 | this.appenderName = appenderName; 18 | setFieldName(APPENDER); 19 | } 20 | 21 | @Override 22 | public void writeTo(JsonGenerator generator, ILoggingEvent event) throws IOException { 23 | if (appenderName != null && !appenderName.isEmpty()) { 24 | generator.writeStringField(getFieldName(), appenderName + ".slog"); 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /nab-jclient/src/main/java/ru/hh/nab/jclient/metrics/MetricsConsumerFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jclient.metrics; 2 | 3 | import static java.util.Optional.ofNullable; 4 | import java.util.Properties; 5 | import ru.hh.jclient.common.metrics.MetricsConsumer; 6 | import ru.hh.nab.metrics.StatsDSender; 7 | 8 | public class MetricsConsumerFactory { 9 | private MetricsConsumerFactory() {} 10 | 11 | private static final MetricsConsumer NOOP_METRICS_CONSUMER = metricsProvider -> {}; 12 | 13 | public static MetricsConsumer buildMetricsConsumer(Properties properties, String name, StatsDSender statsDSender) { 14 | if 
package ru.hh.nab.kafka.consumer.retry.policy;

import java.time.Duration;
import java.time.Instant;
import java.util.Objects;
import java.util.Optional;
import ru.hh.nab.kafka.consumer.retry.MessageProcessingHistory;

/**
 * Decorates another {@link RetryPolicy} with a time-to-live: retries suggested by {@code base}
 * are dropped once they would fall at or after {@code creationTime + ttl}.
 */
public record Ttl(RetryPolicy base, Duration ttl) implements RetryPolicy {

  public Ttl {
    // Compact constructor: validate both components; the two ttl conditions are disjoint.
    Objects.requireNonNull(base);
    if (ttl.isZero()) {
      throw new IllegalArgumentException("Explicitly use Never policy instead of zero ttl");
    }
    if (ttl.isNegative()) {
      throw new IllegalArgumentException("Ttl should be positive");
    }
  }

  @Override
  public Optional<Instant> getNextRetryTime(MessageProcessingHistory history) {
    Instant deadline = history.creationTime().plus(ttl);
    // Keep the base policy's suggestion only while it is strictly before the deadline.
    return base.getNextRetryTime(history).filter(deadline::isAfter);
  }
}
package ru.hh.nab.web.jersey.filter.cache;

import java.io.Serializable;

/**
 * Immutable, serializable HTTP header (name/value pair) used by the response-caching filter.
 */
public class Header implements Serializable {
  private static final long serialVersionUID = -1198547959156704720L;

  public final String header;
  public final String value;

  Header(String header, String value) {
    this.header = header;
    this.value = value;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    Header that = (Header) o;
    // Short-circuits on the name first, exactly like the original two-step comparison.
    return header.equals(that.header) && value.equals(that.value);
  }

  @Override
  public int hashCode() {
    // Same 31-based combination as the original implementation.
    return 31 * header.hashCode() + value.hashCode();
  }
}
/nab-hibernate/src/main/java/ru/hh/nab/hibernate/service/NabServiceContributor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.service; 2 | 3 | import java.util.List; 4 | import org.hibernate.boot.registry.StandardServiceRegistryBuilder; 5 | import org.hibernate.service.Service; 6 | import org.hibernate.service.spi.ServiceContributor; 7 | 8 | public class NabServiceContributor implements ServiceContributor { 9 | 10 | private final List> serviceSuppliers; 11 | 12 | public NabServiceContributor(List> serviceSuppliers) { 13 | this.serviceSuppliers = serviceSuppliers; 14 | } 15 | 16 | @Override 17 | public void contribute(StandardServiceRegistryBuilder serviceRegistryBuilder) { 18 | serviceSuppliers.forEach(serviceSupplier -> apply(serviceRegistryBuilder, serviceSupplier)); 19 | } 20 | 21 | private static void apply(StandardServiceRegistryBuilder serviceRegistryBuilder, ServiceSupplier serviceSupplier) { 22 | serviceRegistryBuilder.addService(serviceSupplier.getClazz(), serviceSupplier.get()); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /nab-telemetry-kafka/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | ru.hh.nab 5 | nuts-and-bolts-parent 6 | 22.0.5-SNAPSHOT 7 | ../pom.xml 8 | 9 | 4.0.0 10 | 11 | nab-telemetry-kafka 12 | jar 13 | 14 | nuts'n'bolts telemetry for kafka 15 | 16 | 17 | 18 | ru.hh.nab 19 | nab-kafka 20 | 21 | 22 | io.opentelemetry 23 | opentelemetry-semconv 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /nab-telemetry/src/main/java/ru/hh/nab/telemetry/ContextStorage.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry; 2 | 3 | import io.opentelemetry.context.Context; 4 | import io.opentelemetry.context.Scope; 5 | import 
ru.hh.jclient.common.util.storage.Storage; 6 | import ru.hh.jclient.common.util.storage.Transfer; 7 | 8 | public class ContextStorage implements Storage { 9 | @Override 10 | public Context get() { 11 | return Context.current(); 12 | } 13 | 14 | @Override 15 | public void set(Context context) { } 16 | 17 | @Override 18 | public void clear() { } 19 | 20 | @Override 21 | public Transfer prepareTransferToAnotherThread() { 22 | return new PreparedContextTransfer(); 23 | } 24 | 25 | public static class PreparedContextTransfer implements Transfer { 26 | private final Context context = Context.current(); 27 | private Scope scope; 28 | 29 | @Override 30 | public void perform() { 31 | scope = context.makeCurrent(); 32 | } 33 | 34 | @Override 35 | public void rollback() { 36 | scope.close(); 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/SecurityExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.ws.rs.core.MediaType; 5 | import jakarta.ws.rs.core.Response; 6 | import org.eclipse.microprofile.openapi.annotations.media.Content; 7 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 8 | import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 9 | import ru.hh.errors.common.Errors; 10 | import static ru.hh.nab.web.jersey.NabPriorities.LOW_PRIORITY; 11 | 12 | @Priority(LOW_PRIORITY) 13 | @APIResponse( 14 | responseCode = "403", 15 | description = "Forbidden", 16 | content = @Content( 17 | mediaType = MediaType.APPLICATION_JSON, 18 | schema = @Schema( 19 | implementation = Errors.class 20 | ) 21 | ) 22 | ) 23 | public class SecurityExceptionMapper extends NabExceptionMapper { 24 | public SecurityExceptionMapper() { 25 | super(Response.Status.FORBIDDEN, LoggingLevel.INFO_WITHOUT_STACK_TRACE); 
package ru.hh.nab.sentry;

import io.sentry.EventProcessor;
import io.sentry.Hint;
import io.sentry.SentryEvent;
import java.util.ArrayList;
import java.util.List;
import ru.hh.nab.common.mdc.MDC;
import static ru.hh.nab.common.mdc.MDC.CONTROLLER_MDC_KEY;

/**
 * Enriches each Sentry event with the current controller name from MDC:
 * appends it to the event fingerprint (so events group per controller)
 * and sets it as a tag. Events without a controller in MDC pass through unchanged.
 */
public class SentryEventProcessor implements EventProcessor {

  @Override
  public SentryEvent process(SentryEvent event, Hint hint) {
    MDC.getController().ifPresent(controller -> {
      List<String> fingerprints = event.getFingerprints();
      // BUGFIX: the previous condition
      //   Optional.ofNullable(event.getFingerprints()).map(List::isEmpty).orElse(false)
      // evaluated to false for a *null* fingerprint list, sending it into the
      // else-branch where fingerprints.contains(...) threw an NPE.
      // Treat null exactly like empty: seed the default fingerprint plus the controller.
      if (fingerprints == null || fingerprints.isEmpty()) {
        event.setFingerprints(List.of("{{ default }}", controller));
      } else if (!fingerprints.contains(controller)) {
        // Copy before mutating: the existing list may be unmodifiable
        // (e.g. the List.of(...) we set ourselves on a previous pass).
        List<String> updated = new ArrayList<>(fingerprints);
        updated.add(controller);
        event.setFingerprints(updated);
      }
      event.setTag(CONTROLLER_MDC_KEY, controller);
    });
    return event;
  }
}
org.junit.jupiter.api.Assertions.assertEquals; 5 | import org.junit.jupiter.api.Test; 6 | import ru.hh.jclient.common.Uri; 7 | 8 | public class UriCompactionUtilTest { 9 | 10 | @Test 11 | public void testCompactionWorksForNumbersAndHexHashes() { 12 | var replacement = "<>"; 13 | Uri uri = Uri.create("http://localhost:2800/resource/123456/daba9e610001f70104003acc866d55656d6a5a/get"); 14 | assertEquals( 15 | new StringJoiner("/", "/", "").add("resource").add(replacement).add(replacement).add("get").toString(), 16 | UriCompactionUtil.compactUri(uri, 4, 16, replacement) 17 | ); 18 | } 19 | 20 | @Test 21 | public void testCompactionDoesNotWorkForShortNumbersAndNonHexHashes() { 22 | String expected = "/resource/123/daka9e610001f70104003acc866d55656d6a5a/get"; 23 | Uri uri = Uri.create("http://localhost:2800" + expected); 24 | assertEquals(expected, UriCompactionUtil.compactUri(uri, 4, 16, "<>")); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/CompactHistogram.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | /** 4 | * This class represents a histogram which compacts recorded metrics of a certain interval into a single value 5 | * to avoid bloating memory with unique values by sacrificing precision. 6 | */ 7 | public class CompactHistogram extends Histogram { 8 | 9 | /** 10 | * Controls the size of a cluster and precision. Value must be power of 2. 11 | */ 12 | private final int compactionRatio; 13 | 14 | public CompactHistogram(int histogramSize, int compactionRatio) { 15 | super(histogramSize); 16 | this.compactionRatio = Integer.highestOneBit(compactionRatio); 17 | } 18 | 19 | /** 20 | * Returns value after compaction procedure. 21 | *

22 | * Recorded value 30 with compactionRatio of 32 will be compacted into cluster point 16. 23 | * 24 | * @param value recorded value 25 | * @return compacted value 26 | */ 27 | @Override 28 | protected int calculateValue(int value) { 29 | return value == 0 ? 0 : ((value - 1) | compactionRatio - 1) + 1; 30 | } 31 | 32 | } 33 | 34 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/IllegalArgumentExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.ws.rs.core.MediaType; 5 | import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; 6 | import org.eclipse.microprofile.openapi.annotations.media.Content; 7 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 8 | import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 9 | import ru.hh.errors.common.Errors; 10 | import static ru.hh.nab.web.jersey.NabPriorities.LOW_PRIORITY; 11 | 12 | @Priority(LOW_PRIORITY) 13 | @APIResponse( 14 | responseCode = "400", 15 | description = "Bad Request", 16 | content = @Content( 17 | mediaType = MediaType.APPLICATION_JSON, 18 | schema = @Schema( 19 | implementation = Errors.class 20 | ) 21 | ) 22 | ) 23 | public class IllegalArgumentExceptionMapper extends NabExceptionMapper { 24 | public IllegalArgumentExceptionMapper() { 25 | super(BAD_REQUEST, LoggingLevel.ERROR_WITH_STACK_TRACE); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/UnwrappingExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.inject.Inject; 4 | import jakarta.ws.rs.WebApplicationException; 5 | import jakarta.ws.rs.core.Response; 6 | import 
jakarta.ws.rs.ext.ExceptionMapper; 7 | import org.glassfish.jersey.server.internal.process.MappableException; 8 | import org.glassfish.jersey.spi.ExceptionMappers; 9 | 10 | public abstract class UnwrappingExceptionMapper implements ExceptionMapper { 11 | 12 | @Inject 13 | private ExceptionMappers mappers; 14 | 15 | @Override 16 | public Response toResponse(T exception) { 17 | Throwable cause = exception.getCause(); 18 | if (cause != null) { 19 | ExceptionMapper mapper = mappers.findMapping(cause); 20 | if (mapper != null) { 21 | return mapper.toResponse(cause); 22 | } 23 | 24 | if (cause instanceof WebApplicationException) { 25 | return ((WebApplicationException) cause).getResponse(); 26 | } 27 | } 28 | 29 | throw new MappableException(cause == null ? exception : cause); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/http/HttpStatus.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.http; 2 | 3 | import jakarta.ws.rs.core.Response; 4 | import jakarta.ws.rs.core.Response.Status; 5 | 6 | public enum HttpStatus implements Response.StatusType { 7 | INSUFFICIENT_TIMEOUT(477, "Insufficient timeout"), 8 | SERVER_TIMEOUT(577, "Server timeout"), 9 | SERVICE_PARTIALLY_UNAVAILABLE(597, "Service partially unavailable"); 10 | 11 | private final int statusCode; 12 | private final String reasonPhrase; 13 | private final Status.Family family; 14 | 15 | HttpStatus(int statusCode, String reasonPhrase) { 16 | this.statusCode = statusCode; 17 | this.reasonPhrase = reasonPhrase; 18 | this.family = Status.Family.familyOf(statusCode); 19 | } 20 | 21 | @Override 22 | public int getStatusCode() { 23 | return statusCode; 24 | } 25 | 26 | @Override 27 | public Status.Family getFamily() { 28 | return family; 29 | } 30 | 31 | @Override 32 | public String getReasonPhrase() { 33 | return reasonPhrase; 34 | } 35 | 36 | @Override 37 | public String 
toString() { 38 | return reasonPhrase; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /nab-logging/src/test/java/ru/hh/nab/logging/NabLoggingConfiguratorTemplateTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging; 2 | 3 | import ch.qos.logback.classic.LoggerContext; 4 | import java.util.Properties; 5 | import static org.junit.jupiter.api.Assertions.assertThrows; 6 | import org.junit.jupiter.api.Test; 7 | 8 | public class NabLoggingConfiguratorTemplateTest { 9 | 10 | @Test 11 | public void testConfigure() { 12 | NabLoggingConfiguratorTemplate configurator = new NabLoggingConfiguratorTemplate() { 13 | @Override 14 | protected Properties createLoggingProperties() { 15 | return new Properties(); 16 | } 17 | 18 | @Override 19 | public void configure(LoggingContextWrapper context) { 20 | HhMultiAppender requests = createAppender(context, "requests", () -> new HhMultiAppender(true)); 21 | HhMultiAppender libraries = createAppender(context, "requests", () -> new HhMultiAppender(true)); 22 | } 23 | }; 24 | LoggerContext lc = new LoggerContext(); 25 | configurator.setContext(lc); 26 | assertThrows(AssertionError.class, () -> configurator.configure(lc)); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/resource/StatusResource.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.resource; 2 | 3 | import jakarta.ws.rs.GET; 4 | import jakarta.ws.rs.Path; 5 | import jakarta.ws.rs.Produces; 6 | import jakarta.ws.rs.core.MediaType; 7 | import java.time.Duration; 8 | import java.util.function.Supplier; 9 | 10 | @Path("") 11 | public class StatusResource { 12 | 13 | private final String serviceName; 14 | private final String serviceVersion; 15 | private final Supplier upTimeSupplier; 16 | 17 | public 
StatusResource(String serviceName, String serviceVersion, Supplier upTimeSupplier) { 18 | this.serviceName = serviceName; 19 | this.serviceVersion = serviceVersion; 20 | this.upTimeSupplier = upTimeSupplier; 21 | } 22 | 23 | @GET 24 | @Produces(MediaType.TEXT_XML) 25 | public String status() { 26 | return "\n" 27 | + "\n" 28 | + " " + serviceVersion + "\n" 29 | + " " + upTimeSupplier.get().toSeconds() + "\n" 30 | + "\n"; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/InsufficientTimeoutExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.starter.exceptions; 2 | 3 | import jakarta.ws.rs.core.MediaType; 4 | import org.eclipse.microprofile.openapi.annotations.media.Content; 5 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 6 | import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 7 | import ru.hh.deadline.context.InsufficientTimeoutException; 8 | import ru.hh.errors.common.Errors; 9 | import ru.hh.nab.web.exceptions.NabExceptionMapper; 10 | import static ru.hh.nab.web.http.HttpStatus.INSUFFICIENT_TIMEOUT; 11 | 12 | @APIResponse( 13 | responseCode = "477", 14 | description = "Insufficient timeout", 15 | content = @Content( 16 | mediaType = MediaType.APPLICATION_JSON, 17 | schema = @Schema( 18 | implementation = Errors.class 19 | ) 20 | ) 21 | ) 22 | public class InsufficientTimeoutExceptionMapper extends NabExceptionMapper { 23 | public InsufficientTimeoutExceptionMapper() { 24 | super(INSUFFICIENT_TIMEOUT, LoggingLevel.ERROR_WITH_STACK_TRACE); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/NabTelemetryDataSourceFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc; 2 | 3 
| import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; 4 | import javax.sql.DataSource; 5 | import ru.hh.nab.telemetry.jdbc.internal.NabTelemetryDataSource; 6 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 7 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDbRequest; 8 | 9 | public class NabTelemetryDataSourceFactory { 10 | 11 | private final Instrumenter connectionInstrumenter; 12 | private final Instrumenter statementInstrumenter; 13 | 14 | public NabTelemetryDataSourceFactory( 15 | Instrumenter connectionInstrumenter, 16 | Instrumenter statementInstrumenter 17 | ) { 18 | this.connectionInstrumenter = connectionInstrumenter; 19 | this.statementInstrumenter = statementInstrumenter; 20 | } 21 | 22 | public DataSource wrap(DataSource dataSource) { 23 | return new NabTelemetryDataSource(dataSource, connectionInstrumenter, statementInstrumenter); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /nab-logging/src/main/java/ru/hh/nab/logging/override/LogLevelOverrideExtension.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging.override; 2 | 3 | import java.util.Map; 4 | import java.util.concurrent.CompletableFuture; 5 | 6 | /** 7 | * Extension which provides ability to override static Loggers levels with dynamic ones. 8 | * To activate extension simply implement it as a Spring bean. 9 | *

10 | * Update interval can be set in service.properties via {@link LogLevelOverrideApplier#UPDATE_INTERVAL_IN_MINUTES_PROPERTY} setting. 11 | * If setting not specified {@link LogLevelOverrideApplier#DEFAULT_INTERVAL_IN_MINUTES} will be used by default. 12 | */ 13 | @FunctionalInterface 14 | public interface LogLevelOverrideExtension { 15 | 16 | /** 17 | * Triggers your application to retrieve log level overrides. 18 | * The extension may throw special {@link SkipLogLevelOverrideException} to skip overriding on this call without any error. 19 | * @return Future with overrides map; map contains logger names and log levels as a keys and values respectively 20 | */ 21 | CompletableFuture> loadLogLevelOverrides() throws SkipLogLevelOverrideException; 22 | } 23 | -------------------------------------------------------------------------------- /nab-telemetry/src/test/java/ru/hh/nab/telemetry/TestResourceWithSubResource.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry; 2 | 3 | import jakarta.ws.rs.DefaultValue; 4 | import jakarta.ws.rs.GET; 5 | import jakarta.ws.rs.HEAD; 6 | import jakarta.ws.rs.Path; 7 | import jakarta.ws.rs.PathParam; 8 | 9 | @Path("/resource") 10 | public class TestResourceWithSubResource { 11 | @GET 12 | @Path("/simple/{name}/greeting") 13 | public String simpleWithParam(@PathParam("name") @DefaultValue("world") String name) { 14 | return "Hello, %s!".formatted(name); 15 | } 16 | @Path("/sub") 17 | public SubResource simple() { 18 | return new SubResource(); 19 | } 20 | 21 | public static class SubResource { 22 | @GET 23 | @Path("/simple") 24 | public String simple() { 25 | return "Hello, world!"; 26 | } 27 | @HEAD 28 | @Path("/simple") 29 | public void simpleHead() { 30 | //do nothing, just for check duplicated path 31 | } 32 | @GET 33 | @Path("/simple/{name}/greeting") 34 | public String simpleWithParam(@PathParam("name") @DefaultValue("world") String name) { 35 | return "Hello, 
/**
 * An aggregator that tracks the maximum over a stream of values —
 * for example, the peak queue size between two metric flushes.
 */
public class Max {

  private static final AtomicIntegerFieldUpdater<Max> maxUpdater =
      AtomicIntegerFieldUpdater.newUpdater(Max.class, "max");

  private final int defaultValue;
  private volatile int max;

  /**
   * @param defaultValue every value passed to {@link Max#save} must be greater or equal
   *                     to this default value.
   */
  public Max(int defaultValue) {
    this.defaultValue = defaultValue;
    max = defaultValue;
  }

  /**
   * Records a value, atomically keeping the larger of it and the current maximum.
   * accumulateAndGet retries internally on contention, exactly like the explicit
   * CAS loop it replaces.
   */
  public void save(int value) {
    maxUpdater.accumulateAndGet(this, value, Math::max);
  }

  /** Returns the current maximum and atomically resets it to the default value. */
  int getAndReset() {
    return maxUpdater.getAndSet(this, defaultValue);
  }

}
org.springframework.transaction.annotation.Transactional} 11 | * or {@link jakarta.transaction.Transactional}, 12 | * transactions created by this annotation will rollback on any exception. 13 | */ 14 | @Retention(RetentionPolicy.RUNTIME) 15 | @Target(ElementType.METHOD) 16 | public @interface ExecuteOnDataSource { 17 | 18 | boolean writableTx() default false; 19 | 20 | /** 21 | * see {@link DataSourceType} for common datasource types 22 | */ 23 | String dataSourceType(); 24 | 25 | boolean overrideByRequestScope() default false; 26 | 27 | /** 28 | * JPA specific attribute. So this attribute is processed only if you use nab-jpa module. Otherwise, it's useless 29 | */ 30 | DataSourceCacheMode cacheMode() default DataSourceCacheMode.NORMAL; 31 | 32 | String txManager() default ""; 33 | } 34 | -------------------------------------------------------------------------------- /nab-jpa/src/main/java/ru/hh/nab/jpa/NabEntityManagerFactoryBean.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.jpa; 2 | 3 | import jakarta.annotation.Nullable; 4 | import jakarta.persistence.EntityManagerFactory; 5 | import jakarta.persistence.spi.PersistenceUnitInfo; 6 | import java.util.Collection; 7 | import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; 8 | 9 | public class NabEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean { 10 | 11 | @Nullable 12 | private Collection entityManagerFactoryCreationHandlers; 13 | 14 | @Override 15 | protected void postProcessEntityManagerFactory(EntityManagerFactory emf, PersistenceUnitInfo pui) { 16 | super.postProcessEntityManagerFactory(emf, pui); 17 | if (entityManagerFactoryCreationHandlers != null) { 18 | entityManagerFactoryCreationHandlers.forEach(handler -> handler.accept(emf, pui)); 19 | } 20 | } 21 | 22 | public void setEntityManagerFactoryCreationHandlers( 23 | @Nullable Collection entityManagerFactoryCreationHandlers 24 | ) { 25 | 
this.entityManagerFactoryCreationHandlers = entityManagerFactoryCreationHandlers; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/DeadLetterQueue.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | import ru.hh.nab.kafka.producer.KafkaProducer; 7 | import ru.hh.nab.kafka.producer.KafkaSendResult; 8 | 9 | class DeadLetterQueue { 10 | private final String topicName; 11 | private final KafkaProducer producer; 12 | 13 | public DeadLetterQueue(String topicName, KafkaProducer producer) { 14 | this.topicName = topicName; 15 | this.producer = producer; 16 | } 17 | 18 | public CompletableFuture> send(ConsumerRecord record) { 19 | return this.send(toProducerRecord(record)); 20 | } 21 | 22 | public CompletableFuture> send(ProducerRecord record) { 23 | return this.producer.sendMessage(record, Runnable::run); 24 | } 25 | 26 | private ProducerRecord toProducerRecord(ConsumerRecord consumerRecord) { 27 | return new ProducerRecord<>(topicName, null, consumerRecord.key(), consumerRecord.value(), consumerRecord.headers()); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/getter/NabJdbcNetAttributesGetter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.getter; 2 | 3 | import io.opentelemetry.instrumentation.api.instrumenter.net.NetClientAttributesGetter; 4 | import io.opentelemetry.instrumentation.jdbc.internal.JdbcNetAttributesGetter; 5 | import jakarta.annotation.Nullable; 6 | import 
ru.hh.nab.telemetry.jdbc.internal.model.NabDbRequest; 7 | 8 | public class NabJdbcNetAttributesGetter implements NetClientAttributesGetter { 9 | 10 | private static final JdbcNetAttributesGetter jdbcNetAttributesGetter = new JdbcNetAttributesGetter(); 11 | 12 | @Nullable 13 | @Override 14 | public String getTransport(NabDbRequest request, @Nullable Void unused) { 15 | return jdbcNetAttributesGetter.getTransport(request.getDbRequest(), unused); 16 | } 17 | 18 | @Nullable 19 | @Override 20 | public String getPeerName(NabDbRequest request) { 21 | return jdbcNetAttributesGetter.getPeerName(request.getDbRequest()); 22 | } 23 | 24 | @Nullable 25 | @Override 26 | public Integer getPeerPort(NabDbRequest request) { 27 | return jdbcNetAttributesGetter.getPeerPort(request.getDbRequest()); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /nab-jpa/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | ru.hh.nab 6 | nuts-and-bolts-parent 7 | 22.0.5-SNAPSHOT 8 | ../pom.xml 9 | 10 | 11 | nab-jpa 12 | jar 13 | 14 | nuts'n'bolts jpa and spring integration 15 | 16 | 17 | 18 | ru.hh.nab 19 | nab-data-source 20 | 21 | 22 | org.springframework 23 | spring-orm 24 | 25 | 26 | jakarta.persistence 27 | jakarta.persistence-api 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /nab-testbase/src/main/java/ru/hh/nab/testbase/redis/EmbeddedRedisFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.testbase.redis; 2 | 3 | import java.util.Optional; 4 | import org.testcontainers.containers.GenericContainer; 5 | 6 | public class EmbeddedRedisFactory { 7 | 8 | private static final String DEFAULT_REDIS_IMAGE = "redis:6.2.3"; 9 | //переменная окружения для возможности оверрайда дефолтной версии Redis-контейнера 10 | public static final String REDIS_IMAGE_ENV_VARIABLE = 
"EXT_REDIS_IMAGE"; 11 | public static final int REDIS_DEFAULT_PORT = 6379; 12 | 13 | public static GenericContainer getEmbeddedRedis() { 14 | return EmbeddedRedisFactory.EmbeddedRedisSingleton.INSTANCE; 15 | } 16 | 17 | private static class EmbeddedRedisSingleton { 18 | private static final GenericContainer INSTANCE = createEmbeddedRedis(); 19 | private static GenericContainer createEmbeddedRedis() { 20 | String imageName = Optional.ofNullable(System.getenv(REDIS_IMAGE_ENV_VARIABLE)).orElse(DEFAULT_REDIS_IMAGE); 21 | 22 | GenericContainer container = new GenericContainer<>(imageName) 23 | .withExposedPorts(REDIS_DEFAULT_PORT); 24 | container.start(); 25 | return container; 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/healthcheck/HealthCheckHikariDataSourceFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.healthcheck; 2 | 3 | import com.zaxxer.hikari.HikariConfig; 4 | import com.zaxxer.hikari.HikariDataSource; 5 | import java.util.Set; 6 | import ru.hh.nab.metrics.StatsDSender; 7 | import ru.hh.nab.metrics.Tag; 8 | import static ru.hh.nab.metrics.Tag.APP_TAG_NAME; 9 | import static ru.hh.nab.metrics.Tag.DATASOURCE_TAG_NAME; 10 | import ru.hh.nab.metrics.TaggedSender; 11 | 12 | public class HealthCheckHikariDataSourceFactory { 13 | 14 | private final String serviceName; 15 | private final StatsDSender statsDSender; 16 | 17 | public HealthCheckHikariDataSourceFactory(String serviceName, StatsDSender statsDSender) { 18 | this.serviceName = serviceName; 19 | this.statsDSender = statsDSender; 20 | } 21 | 22 | public HikariDataSource create(HikariConfig hikariConfig) { 23 | Set tags = Set.of( 24 | new Tag(APP_TAG_NAME, serviceName), 25 | new Tag(DATASOURCE_TAG_NAME, hikariConfig.getPoolName()) 26 | ); 27 | TaggedSender metricsSender = new TaggedSender(statsDSender, tags); 28 | return 
package ru.hh.nab.hibernate.util;

import java.util.function.Predicate;
import java.util.regex.Pattern;
import static java.util.regex.Pattern.CASE_INSENSITIVE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.hh.nab.common.mdc.MDC;

/**
 * Prepends the current MDC request id to SQL statements as a leading comment,
 * so queries can be correlated with requests in database logs.
 */
public final class SqlRequestIdCommenter {

  private static final Logger LOGGER = LoggerFactory.getLogger(SqlRequestIdCommenter.class);
  // Guard against SQL injection and absurdly long ids: only short, strictly
  // alphanumeric (plus '_' / '-') request ids are ever embedded in a query.
  private static final int MAX_REQUEST_ID_LENGTH = 100;
  private static final Predicate<String> SQL_REQUEST_ID_IS_VALID =
      Pattern.compile("^[a-z0-9_-]+$", CASE_INSENSITIVE).asPredicate();

  private SqlRequestIdCommenter() {
  }

  /**
   * @param sql the statement to annotate
   * @return the statement prefixed with a request-id comment, or the statement
   *         unchanged when no request id is present or it fails validation
   */
  public static String addRequestIdComment(final String sql) {
    return MDC.getRequestId().map(requestId -> withComment(requestId, sql)).orElse(sql);
  }

  /** Validates the request id and builds the commented statement; falls back to the raw SQL. */
  private static String withComment(String requestId, String sql) {
    if (requestId.length() > MAX_REQUEST_ID_LENGTH || !SQL_REQUEST_ID_IS_VALID.test(requestId)) {
      LOGGER.warn("Errant request id, not including it to SQL query: {}", requestId);
      return sql;
    }
    return "/* " + requestId + " */ " + sql;
  }
}
maxHistogramSize an upper limit on the number of different metric values. See {@link Histogram#Histogram(int)}. 14 | * @param numOfHistogramLimit an upper limit on the number of histograms to avoid memory overflow
15 | * @param maxNumOfHistogramsProvider a dynamic limit on the number of histograms. should work fast, because will be called on each save 16 | * 17 | */ 18 | public AdjustingHistograms(int maxHistogramSize, int numOfHistogramLimit, IntSupplier maxNumOfHistogramsProvider) { 19 | super(maxHistogramSize, numOfHistogramLimit); 20 | this.maxNumOfHistogramsProvider = maxNumOfHistogramsProvider; 21 | } 22 | 23 | @Override 24 | protected int getMaxNumOfHistograms() { 25 | return Math.min(maxNumOfHistogramsProvider.getAsInt(), super.getMaxNumOfHistograms()); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/monitoring/ConnectionPoolMetrics.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.monitoring; 2 | 3 | public final class ConnectionPoolMetrics { 4 | public static final String CREATION_MS = "jdbc_pool.connection.creation_ms"; 5 | public static final String ACQUISITION_MS = "jdbc_pool.connection.acquisition_ms"; 6 | public static final String USAGE_MS = "jdbc_pool.connection.usage_ms"; 7 | public static final String TOTAL_USAGE_MS = "jdbc_pool.connection.total_usage_ms"; 8 | public static final String SAMPLED_USAGE_MS = "jdbc_pool.connection.sampled_usage_ms"; 9 | 10 | public static final String CONNECTION_TIMEOUTS = "jdbc_pool.connection_timeouts"; 11 | public static final String ACTIVE_CONNECTIONS = "jdbc_pool.active_connections"; 12 | public static final String TOTAL_CONNECTIONS = "jdbc_pool.total_connections"; 13 | public static final String IDLE_CONNECTIONS = "jdbc_pool.idle_connections"; 14 | public static final String MAX_CONNECTIONS = "jdbc_pool.max_connections"; 15 | public static final String MIN_CONNECTIONS = "jdbc_pool.min_connections"; 16 | public static final String PENDING_THREADS = "jdbc_pool.pending_threads"; 17 | 18 | 19 | private ConnectionPoolMetrics() { 20 | } 21 | } 22 | 
package ru.hh.nab.sentry;

import io.sentry.Sentry;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * One-shot initializer of the Sentry SDK from service properties
 * ({@code dsn}, {@code release}).
 */
public final class SentryInitializer {

  private static final Logger LOGGER = LoggerFactory.getLogger(SentryInitializer.class);
  private static final String RELEASE_PROPERTY = "release";
  private static final String DSN_PROPERTY = "dsn";

  // Utility class: only the static entry point below; no instances.
  private SentryInitializer() {
  }

  /**
   * Initializes Sentry when a DSN is configured; when the DSN is blank,
   * logs a warning and leaves Sentry uninitialized.
   *
   * @param properties service properties holding {@code dsn} and {@code release}
   */
  public static void init(Properties properties) {
    String dsn = properties.getProperty(DSN_PROPERTY);
    if (StringUtils.isBlank(dsn)) {
      LOGGER.warn("Sentry DSN is empty!");
      return;
    }
    Sentry.init(options -> {
      options.setEnableExternalConfiguration(true);
      options.setDsn(dsn);
      options.setRelease(properties.getProperty(RELEASE_PROPERTY));
      options.addEventProcessor(new SentryEventProcessor());
      options.setEnableUncaughtExceptionHandler(false);
    });
    // Sentry.init generates a random traceId, so we reset it to default value
    SentryScopeConfigurator.clearTraceId();
  }
}
consumer.partitionsFor(topicName); 27 | } 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/exceptions/NotFoundExceptionMapper.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.exceptions; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.ws.rs.NotFoundException; 5 | import jakarta.ws.rs.core.MediaType; 6 | import jakarta.ws.rs.core.Response; 7 | import org.eclipse.microprofile.openapi.annotations.media.Content; 8 | import org.eclipse.microprofile.openapi.annotations.media.Schema; 9 | import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; 10 | import ru.hh.errors.common.Errors; 11 | import static ru.hh.nab.web.jersey.NabPriorities.LOW_PRIORITY; 12 | 13 | @Priority(LOW_PRIORITY) 14 | @APIResponse( 15 | responseCode = "404", 16 | description = "Not Found", 17 | content = @Content( 18 | mediaType = MediaType.APPLICATION_JSON, 19 | schema = @Schema( 20 | implementation = Errors.class 21 | ) 22 | ) 23 | ) 24 | public class NotFoundExceptionMapper extends NabExceptionMapper { 25 | public NotFoundExceptionMapper() { 26 | super(null, LoggingLevel.NOTHING); 27 | } 28 | 29 | @Override 30 | protected Response serializeException(Response.StatusType statusCode, NotFoundException exception) { 31 | return exception.getResponse(); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/producer/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.producer; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | import java.util.concurrent.Executor; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | 7 | public abstract class KafkaProducer { 8 | 9 | public final CompletableFuture> sendMessage(String topic, T kafkaMessage) { 
10 | return sendMessage(topic, null, kafkaMessage, Runnable::run); 11 | } 12 | 13 | public final CompletableFuture> sendMessage(String topic, T kafkaMessage, Executor executor) { 14 | return sendMessage(topic, null, kafkaMessage, executor); 15 | } 16 | 17 | public final CompletableFuture> sendMessage(String topic, String key, T kafkaMessage) { 18 | return sendMessage(topic, key, kafkaMessage, Runnable::run); 19 | } 20 | 21 | public final CompletableFuture> sendMessage(String topic, String key, T kafkaMessage, Executor executor) { 22 | return sendMessage(new ProducerRecord<>(topic, key, kafkaMessage), executor); 23 | } 24 | 25 | public abstract CompletableFuture> sendMessage(ProducerRecord record, Executor executor); 26 | 27 | } 28 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/util/AckUtils.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.util; 2 | 3 | import java.util.Collection; 4 | import java.util.Comparator; 5 | import java.util.Map; 6 | import java.util.function.BinaryOperator; 7 | import java.util.stream.Collectors; 8 | import org.apache.kafka.clients.consumer.ConsumerRecord; 9 | import org.apache.kafka.clients.consumer.OffsetAndMetadata; 10 | import org.apache.kafka.common.TopicPartition; 11 | 12 | public class AckUtils { 13 | 14 | public static Map getLatestOffsetForEachPartition(Collection> messages) { 15 | return messages.stream().collect( 16 | Collectors.toMap( 17 | AckUtils::getMessagePartition, 18 | AckUtils::getOffsetOfNextMessage, 19 | BinaryOperator.maxBy(Comparator.comparingLong(OffsetAndMetadata::offset)) 20 | ) 21 | ); 22 | } 23 | 24 | public static OffsetAndMetadata getOffsetOfNextMessage(ConsumerRecord message) { 25 | return new OffsetAndMetadata(message.offset() + 1); 26 | } 27 | 28 | public static TopicPartition getMessagePartition(ConsumerRecord message) { 29 | return new 
TopicPartition(message.topic(), message.partition()); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/aspect/TransactionalAspect.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.aspect; 2 | 3 | import org.aspectj.lang.ProceedingJoinPoint; 4 | import org.aspectj.lang.annotation.Around; 5 | import org.aspectj.lang.annotation.Aspect; 6 | import org.springframework.core.Ordered; 7 | import org.springframework.core.annotation.Order; 8 | import ru.hh.nab.datasource.routing.DataSourceContextUnsafe; 9 | 10 | @Aspect 11 | @Order(Ordered.HIGHEST_PRECEDENCE) 12 | public class TransactionalAspect { 13 | 14 | @Around( 15 | value = "@annotation(org.springframework.transaction.annotation.Transactional) || @annotation(jakarta.transaction.Transactional)", 16 | argNames = "pjp" 17 | ) 18 | public Object executeOnSpecialDataSource(ProceedingJoinPoint pjp) throws Throwable { 19 | try { 20 | return DataSourceContextUnsafe.executeOn( 21 | DataSourceContextUnsafe.getDataSourceName(), 22 | () -> { 23 | try { 24 | return pjp.proceed(); 25 | } catch (Throwable e) { 26 | throw new ExecuteOnDataSourceWrappedException(e); 27 | } 28 | } 29 | ); 30 | } catch (ExecuteOnDataSourceWrappedException e) { 31 | throw e.getCause(); 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/resolver/JsonCharacterEscapes.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | import com.fasterxml.jackson.core.JsonpCharacterEscapes; 4 | import com.fasterxml.jackson.core.SerializableString; 5 | import com.fasterxml.jackson.core.io.CharacterEscapes; 6 | import com.fasterxml.jackson.core.io.SerializedString; 7 | import java.util.stream.IntStream; 8 | 9 | public class 
JsonCharacterEscapes extends JsonpCharacterEscapes { 10 | private static final SerializableString REPLACEMENT_STR = new SerializedString(String.valueOf(CharacterEscapeBase.REPLACEMENT_CHAR)); 11 | 12 | private static final int[] asciiEscapes = CharacterEscapes.standardAsciiEscapesForJSON(); 13 | static { 14 | IntStream 15 | .range(0, 0x20) 16 | .filter(i -> CharacterEscapeBase.isInvalidTextSymbol((char) i)) 17 | .forEach(i -> asciiEscapes[i] = CharacterEscapes.ESCAPE_CUSTOM); 18 | } 19 | 20 | @Override 21 | public int[] getEscapeCodesForAscii() { 22 | return asciiEscapes; 23 | } 24 | 25 | @Override 26 | public SerializableString getEscapeSequence(int i) { 27 | if (CharacterEscapeBase.isInvalidTextSymbol((char) i)) { 28 | return REPLACEMENT_STR; 29 | } 30 | 31 | return super.getEscapeSequence(i); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/exception/LoggingUncaughtExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.exception; 2 | 3 | import java.util.concurrent.atomic.AtomicBoolean; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | public final class LoggingUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { 8 | 9 | private static final Logger LOGGER = LoggerFactory.getLogger(LoggingUncaughtExceptionHandler.class); 10 | 11 | private static volatile Thread.UncaughtExceptionHandler replacedDefaultExceptionHandler; 12 | private static final AtomicBoolean registered = new AtomicBoolean(false); 13 | 14 | public static void registerAsDefault() { 15 | if (registered.compareAndSet(false, true)) { 16 | replacedDefaultExceptionHandler = Thread.getDefaultUncaughtExceptionHandler(); 17 | Thread.setDefaultUncaughtExceptionHandler(new LoggingUncaughtExceptionHandler()); 18 | } 19 | } 20 | 21 | @Override 22 | public void uncaughtException(Thread t, Throwable e) { 
23 | LOGGER.error("Uncaught exception in {}", t, e); 24 | if (replacedDefaultExceptionHandler != null) { 25 | replacedDefaultExceptionHandler.uncaughtException(t, e); 26 | } 27 | } 28 | 29 | private LoggingUncaughtExceptionHandler() { 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /nab-sentry/src/main/java/ru/hh/nab/sentry/SentryScopeConfigurator.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.sentry; 2 | 3 | import io.sentry.Sentry; 4 | import io.sentry.SentryTraceHeader; 5 | import io.sentry.protocol.SentryId; 6 | import java.util.Optional; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | 10 | public class SentryScopeConfigurator { 11 | 12 | private static final Logger LOGGER = LoggerFactory.getLogger(SentryScopeConfigurator.class); 13 | 14 | @SuppressWarnings("UnstableApiUsage") 15 | public static void setTraceId(String traceId) { 16 | Sentry.configureScope(scope -> { 17 | try { 18 | scope.getPropagationContext().setTraceId(new SentryId(traceId)); 19 | } catch (RuntimeException e) { 20 | LOGGER.warn("Unable to set sentry trace id: {}", traceId, e); 21 | } 22 | }); 23 | } 24 | 25 | public static Optional getTraceId() { 26 | return Optional.ofNullable(Sentry.getTraceparent()).map(SentryTraceHeader::getTraceId).map(SentryId::toString); 27 | } 28 | 29 | @SuppressWarnings("UnstableApiUsage") 30 | public static void clearTraceId() { 31 | Sentry.configureScope(scope -> scope.getPropagationContext().setTraceId(SentryId.EMPTY_ID)); 32 | } 33 | 34 | private SentryScopeConfigurator() { 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /nab-data-source/src/main/java/ru/hh/nab/datasource/routing/RoutingDataSourceFactory.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.datasource.routing; 2 | 3 | import javax.sql.DataSource; 4 | import 
ru.hh.nab.metrics.StatsDSender; 5 | 6 | public class RoutingDataSourceFactory { 7 | 8 | private final String serviceName; 9 | private final StatsDSender statsDSender; 10 | 11 | public RoutingDataSourceFactory(String serviceName, StatsDSender statsDSender) { 12 | this.serviceName = serviceName; 13 | this.statsDSender = statsDSender; 14 | } 15 | 16 | /** 17 | * It's not allowed to use this factory method if application needs to work with multiple databases. 18 | * In this case you should use {@link RoutingDataSourceFactory#create()} and inject all dataSources via 19 | * - {@link RoutingDataSource#addNamedDataSource(DataSource)} - the most preferred way 20 | * - {@link RoutingDataSource#addDataSource(String, DataSource)} 21 | * - {@link RoutingDataSource#addDataSource(String, String, DataSource)} 22 | */ 23 | public RoutingDataSource create(DataSource defaultDataSource) { 24 | return new RoutingDataSource(defaultDataSource, serviceName, statsDSender); 25 | } 26 | 27 | public RoutingDataSource create() { 28 | return new RoutingDataSource(serviceName, statsDSender); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/interceptor/SentryAppenderInterceptor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.interceptor; 2 | 3 | import jakarta.annotation.Priority; 4 | import jakarta.servlet.http.HttpServletRequest; 5 | import jakarta.ws.rs.WebApplicationException; 6 | import jakarta.ws.rs.core.Context; 7 | import jakarta.ws.rs.ext.WriterInterceptor; 8 | import jakarta.ws.rs.ext.WriterInterceptorContext; 9 | import java.io.IOException; 10 | import java.util.Objects; 11 | import java.util.Optional; 12 | import ru.hh.nab.common.mdc.MDC; 13 | import static ru.hh.nab.common.mdc.MDC.CONTROLLER_MDC_KEY; 14 | import ru.hh.nab.web.jersey.NabPriorities; 15 | 16 | @Priority(NabPriorities.OBSERVABILITY) 17 | public class 
SentryAppenderInterceptor implements WriterInterceptor { 18 | 19 | @Context 20 | protected HttpServletRequest request; 21 | 22 | @Override 23 | public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { 24 | if (MDC.getController().isEmpty()) { 25 | Optional.ofNullable(request.getAttribute(CONTROLLER_MDC_KEY)).ifPresent(controller -> MDC.setController(Objects.toString(controller))); 26 | context.proceed(); 27 | MDC.clearController(); 28 | } else { 29 | context.proceed(); 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/jersey/resolver/TestResource.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | import jakarta.ws.rs.GET; 4 | import jakarta.ws.rs.Path; 5 | import jakarta.ws.rs.Produces; 6 | import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON; 7 | import static jakarta.ws.rs.core.MediaType.APPLICATION_XML; 8 | import jakarta.xml.bind.annotation.XmlElement; 9 | import jakarta.xml.bind.annotation.XmlRootElement; 10 | 11 | @Path("/") 12 | @Produces({APPLICATION_XML, APPLICATION_JSON}) 13 | public class TestResource { 14 | @GET 15 | public DTO simple() { 16 | return new DTO("test"); 17 | } 18 | 19 | @Path("/0C") 20 | @GET 21 | public DTO c() { 22 | return new DTO("\u000C"); 23 | } 24 | 25 | @Path("/FFFE") 26 | @GET 27 | public DTO fffe() { 28 | return new DTO("\uFFFE"); 29 | } 30 | 31 | @Path("/0A") 32 | @GET 33 | public DTO a() { 34 | return new DTO("\n"); 35 | } 36 | 37 | @Path("/special") 38 | @GET 39 | public DTO special() { 40 | return new DTO("&<"); 41 | } 42 | 43 | @XmlRootElement(name = "dto") 44 | public static class DTO { 45 | @XmlElement 46 | public String string; 47 | 48 | public DTO() {} 49 | 50 | public DTO(String string) { 51 | this.string = string; 52 | } 53 | } 54 | } 55 | 
-------------------------------------------------------------------------------- /nab-hibernate/src/test/java/ru/hh/nab/hibernate/util/SqlRequestIdCommenterTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.util; 2 | 3 | import org.apache.commons.lang3.RandomStringUtils; 4 | import static org.junit.jupiter.api.Assertions.assertEquals; 5 | import org.junit.jupiter.api.Test; 6 | import ru.hh.nab.common.mdc.MDC; 7 | 8 | public class SqlRequestIdCommenterTest { 9 | @Test 10 | public void testSqlRequestIds() { 11 | String SQL = "SELECT * FROM resume;"; 12 | 13 | MDC.setRequestId("Valid-rid_0123"); 14 | assertEquals("/* Valid-rid_0123 */ SELECT * FROM resume;", SqlRequestIdCommenter.addRequestIdComment(SQL)); 15 | MDC.clearRequestId(); 16 | 17 | String rid = RandomStringUtils.randomAlphanumeric(100); 18 | MDC.setRequestId(rid); 19 | assertEquals(String.format("/* %s */ SELECT * FROM resume;", rid), SqlRequestIdCommenter.addRequestIdComment(SQL)); 20 | MDC.clearRequestId(); 21 | 22 | MDC.setRequestId(RandomStringUtils.randomAlphanumeric(101)); 23 | assertEquals("SELECT * FROM resume;", SqlRequestIdCommenter.addRequestIdComment(SQL)); 24 | MDC.clearRequestId(); 25 | 26 | MDC.setRequestId("*/ DELETE FROM resume; /*"); 27 | assertEquals("SELECT * FROM resume;", SqlRequestIdCommenter.addRequestIdComment(SQL)); 28 | MDC.clearRequestId(); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /nab-common/src/main/java/ru/hh/nab/common/mdc/MDC.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.mdc; 2 | 3 | import java.util.Optional; 4 | 5 | public class MDC { 6 | public static final String REQUEST_ID_MDC_KEY = "rid"; 7 | public static final String CONTROLLER_MDC_KEY = "controller"; 8 | 9 | public static Optional getRequestId() { 10 | return getKey(REQUEST_ID_MDC_KEY); 11 | } 12 | 13 | public static 
void setRequestId(String rid) { 14 | setKey(REQUEST_ID_MDC_KEY, rid); 15 | } 16 | 17 | public static void clearRequestId() { 18 | deleteKey(REQUEST_ID_MDC_KEY); 19 | } 20 | 21 | public static Optional getController() { 22 | return getKey(CONTROLLER_MDC_KEY); 23 | } 24 | 25 | public static void setController(String controller) { 26 | setKey(CONTROLLER_MDC_KEY, controller); 27 | } 28 | 29 | public static void clearController() { 30 | deleteKey(CONTROLLER_MDC_KEY); 31 | } 32 | 33 | public static Optional getKey(String key) { 34 | return Optional.ofNullable(org.slf4j.MDC.get(key)); 35 | } 36 | 37 | public static void setKey(String key, String value) { 38 | org.slf4j.MDC.put(key, value); 39 | } 40 | 41 | public static void deleteKey(String key) { 42 | org.slf4j.MDC.remove(key); 43 | } 44 | 45 | private MDC() { 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Nuts-and-Bolts 2 | 3 | Nuts-and-Bolts is a set of small Java libraries which are used in [hh.ru](https://hh.ru) to create micro-services. 
package ru.hh.nab.datasource;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.springframework.transaction.support.TransactionSynchronizationManager.isActualTransactionActive;
import static org.springframework.transaction.support.TransactionSynchronizationManager.isCurrentTransactionReadOnly;
import static org.springframework.transaction.support.TransactionSynchronizationManager.isSynchronizationActive;

/**
 * Test assertions about the Spring transaction state of the CURRENT thread,
 * based on {@code TransactionSynchronizationManager} flags.
 */
public final class TransactionAssertions {

  // Static utility — not instantiable.
  private TransactionAssertions() {
  }

  /** Asserts an actual read-write transaction is active on this thread. */
  public static void assertReadWriteTransaction() {
    assertTrue(isSynchronizationActive());
    assertTrue(isActualTransactionActive());
    assertFalse(isCurrentTransactionReadOnly());
  }

  /**
   * Asserts read-only transactional semantics: synchronization is active but no
   * physical transaction was started.
   */
  public static void assertReadOnlyTransaction() {
    assertTrue(isSynchronizationActive());
    assertFalse(isActualTransactionActive()); // means no transaction when @Transactional(readOnly=true) is used
  }

  /** Asserts no transaction or synchronization is active on this thread. */
  public static void assertActualTransactionIsNotActive() {
    assertFalse(isSynchronizationActive());
    assertFalse(isActualTransactionActive());
  }
}
package ru.hh.nab.common.servlet;

/**
 * Priority constants for servlet filters.
 * It's a copy of {@code ru.hh.nab.web.jersey.NabPriorities}.
 * Please note servlet filters are invoked before jax-rs filters.
 */
public final class ServletFilterPriorities {

  /** Observability filter/interceptor priority — runs first. */
  public static final int OBSERVABILITY = 500;

  /** Cache filter/interceptor priority. */
  public static final int CACHE = 750;

  /** Security authentication filter/interceptor priority. */
  public static final int AUTHENTICATION = 1000;

  /** Security authorization filter/interceptor priority. */
  public static final int AUTHORIZATION = 2000;

  /** Header decorator filter/interceptor priority. */
  public static final int HEADER_DECORATOR = 3000;

  /** Message encoder or decoder filter/interceptor priority. */
  public static final int ENTITY_CODER = 4000;

  /** User-level filter/interceptor priority — runs last. */
  public static final int USER = 5000;

  // Constants holder — not instantiable.
  private ServletFilterPriorities() {
  }
}
42 | */ 43 | public static final int USER = 5000; 44 | 45 | private ServletFilterPriorities() { 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/filter/cache/CachingOutputStream.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.filter.cache; 2 | 3 | import java.io.ByteArrayOutputStream; 4 | import java.io.IOException; 5 | import java.io.OutputStream; 6 | 7 | public class CachingOutputStream extends OutputStream { 8 | private final ByteArrayOutputStream content = new ByteArrayOutputStream(1024); 9 | 10 | private final OutputStream delegate; 11 | 12 | public CachingOutputStream(OutputStream delegate) { 13 | this.delegate = delegate; 14 | } 15 | 16 | @Override 17 | public void write(int b) throws IOException { 18 | delegate.write(b); 19 | content.write(b); 20 | } 21 | 22 | @Override 23 | public void write(byte[] b) throws IOException { 24 | delegate.write(b); 25 | content.write(b); 26 | } 27 | 28 | @Override 29 | public void write(byte[] b, int off, int len) throws IOException { 30 | delegate.write(b, off, len); 31 | content.write(b, off, len); 32 | } 33 | 34 | @Override 35 | public void flush() throws IOException { 36 | delegate.flush(); 37 | content.flush(); 38 | } 39 | 40 | @Override 41 | public void close() throws IOException { 42 | delegate.close(); 43 | content.close(); 44 | } 45 | 46 | public byte[] getContentAsByteArray() { 47 | return content.toByteArray(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/http/RequestContext.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.http; 2 | 3 | public class RequestContext { 4 | private RequestContext() {} 5 | 6 | private static final ThreadLocal REQUEST_SOURCE = new ThreadLocal<>(); 7 | private static 
final ThreadLocal LOAD_TESTING = new ThreadLocal<>(); 8 | private static final ThreadLocal OUTER_TIMEOUT = new ThreadLocal<>(); 9 | 10 | public static String getRequestSource() { 11 | return REQUEST_SOURCE.get(); 12 | } 13 | 14 | public static void setRequestSource(String source) { 15 | REQUEST_SOURCE.set(source); 16 | } 17 | 18 | public static void clearRequestSource() { 19 | REQUEST_SOURCE.remove(); 20 | } 21 | 22 | public static boolean isLoadTesting() { 23 | return Boolean.TRUE.equals(LOAD_TESTING.get()); 24 | } 25 | 26 | public static void setLoadTesting(boolean isLoadTesting) { 27 | LOAD_TESTING.set(isLoadTesting); 28 | } 29 | 30 | public static void clearLoadTesting() { 31 | LOAD_TESTING.remove(); 32 | } 33 | 34 | public static Long getOuterTimeoutMs() { 35 | return OUTER_TIMEOUT.get(); 36 | } 37 | 38 | public static void setOuterTimeoutMs(Long outerTimeoutMs) { 39 | OUTER_TIMEOUT.set(outerTimeoutMs); 40 | } 41 | 42 | public static void clearOuterTimeout() { 43 | OUTER_TIMEOUT.remove(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/jersey/resolver/variants/PartiallyOverflowingCacheOptional.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver.variants; 2 | 3 | import java.util.Optional; 4 | import java.util.concurrent.ConcurrentHashMap; 5 | import java.util.function.Function; 6 | import org.springframework.util.ConcurrentReferenceHashMap; 7 | 8 | public class PartiallyOverflowingCacheOptional implements GenericCache { 9 | private final ConcurrentHashMap strongStorage = new ConcurrentHashMap<>(); 10 | private final ConcurrentReferenceHashMap weakStorage = new ConcurrentReferenceHashMap<>(16, 0.75f, 1, 11 | ConcurrentReferenceHashMap.ReferenceType.SOFT); 12 | private final int strongStorageMaxSize; 13 | 14 | public PartiallyOverflowingCacheOptional(int strongStorageMaxSize) { 15 | 
this.strongStorageMaxSize = strongStorageMaxSize; 16 | } 17 | 18 | public int getStorageSize() { 19 | return strongStorage.size() + weakStorage.size(); 20 | } 21 | 22 | public V computeIfAbsent(K key, Function mappingFunction) { 23 | if (strongStorage.mappingCount() < strongStorageMaxSize) { 24 | return strongStorage.computeIfAbsent(key, mappingFunction); 25 | } 26 | 27 | return Optional.ofNullable(strongStorage.get(key)).orElseGet(() -> weakStorage.computeIfAbsent(key, mappingFunction)); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/test/java/ru/hh/nab/telemetry/jdbc/internal/extractor/ConnectionSpanNameExtractorTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.extractor; 2 | 3 | import com.zaxxer.hikari.HikariDataSource; 4 | import static org.junit.jupiter.api.Assertions.assertEquals; 5 | import org.junit.jupiter.api.Test; 6 | import static ru.hh.nab.telemetry.jdbc.internal.extractor.ConnectionSpanNameExtractor.GET_CONNECTION; 7 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 8 | 9 | public class ConnectionSpanNameExtractorTest { 10 | 11 | private static final ConnectionSpanNameExtractor connectionSpanNameExtractor = new ConnectionSpanNameExtractor(); 12 | 13 | @Test 14 | public void testExtractContainsNameIfSpecifiedInInfo() { 15 | var info = new NabDataSourceInfo().setDataSource(new HikariDataSource()).setDataSourceName("readonly").setWritableDataSource(false); 16 | String extracted = connectionSpanNameExtractor.extract(info); 17 | assertEquals(GET_CONNECTION + " readonly", extracted); 18 | } 19 | 20 | @Test 21 | public void testExtractContainsDataSourceClassNameIfNameDoesntSpecifiedInInfo() { 22 | var info = new NabDataSourceInfo().setDataSource(new HikariDataSource()); 23 | String extracted = connectionSpanNameExtractor.extract(info); 24 | assertEquals(GET_CONNECTION + " 
HikariDataSource", extracted); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/jersey/resolver/variants/PartiallyOverflowingCache.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver.variants; 2 | 3 | import java.util.concurrent.ConcurrentHashMap; 4 | import java.util.function.Function; 5 | import org.springframework.util.ConcurrentReferenceHashMap; 6 | 7 | public class PartiallyOverflowingCache implements GenericCache { 8 | private final ConcurrentHashMap strongStorage = new ConcurrentHashMap<>(); 9 | private final ConcurrentReferenceHashMap weakStorage = new ConcurrentReferenceHashMap<>(16, 0.75f, 1, 10 | ConcurrentReferenceHashMap.ReferenceType.SOFT); 11 | private final int strongStorageMaxSize; 12 | 13 | public PartiallyOverflowingCache(int strongStorageMaxSize) { 14 | this.strongStorageMaxSize = strongStorageMaxSize; 15 | } 16 | 17 | public int getStorageSize() { 18 | return strongStorage.size() + weakStorage.size(); 19 | } 20 | 21 | public V computeIfAbsent(K key, Function mappingFunction) { 22 | if (strongStorage.mappingCount() < strongStorageMaxSize) { 23 | return strongStorage.computeIfAbsent(key, mappingFunction); 24 | } 25 | 26 | V value = strongStorage.get(key); 27 | 28 | if (value != null) { 29 | return value; 30 | } 31 | 32 | return weakStorage.computeIfAbsent(key, mappingFunction); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /nab-metrics/src/main/java/ru/hh/nab/metrics/TaggedSender.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics; 2 | 3 | import java.util.Collection; 4 | 5 | public class TaggedSender { 6 | private final Tag[] tags; 7 | private final StatsDSender delegate; 8 | 9 | 10 | public TaggedSender(StatsDSender delegate, Collection tags) { 11 | this.tags = 
package ru.hh.nab.metrics;

import java.util.Collection;

/**
 * Convenience wrapper around {@link StatsDSender} that attaches the same fixed set of tags
 * to every metric it sends.
 */
public class TaggedSender {

  private final Tag[] tags;
  private final StatsDSender sender;

  public TaggedSender(StatsDSender delegate, Collection<Tag> tags) {
    this.sender = delegate;
    // snapshot the tags once; the array is reused for every send
    this.tags = tags.toArray(Tag[]::new);
  }

  public void sendTime(String metricName, long value) {
    sender.sendTime(metricName, value, tags);
  }

  public void sendCount(String metricName, long delta) {
    sender.sendCount(metricName, delta, tags);
  }

  public void sendCounters(String metricName, Counters counters) {
    sender.sendCounters(metricName, counters, tags);
  }

  public void sendGauge(String metricName, long metric) {
    sender.sendGauge(metricName, metric, tags);
  }

  public void sendMax(String metricName, Max max) {
    sender.sendMax(metricName, max, tags);
  }

  public void sendHistogram(String metricName, Histogram histogram, int... percentiles) {
    // note: StatsDSender takes tags before the histogram for this overload
    sender.sendHistogram(metricName, tags, histogram, percentiles);
  }

  public void sendMoments(String metricName, Moments moments) {
    sender.sendMoments(metricName, moments, tags);
  }
}
package ru.hh.nab.metrics;

import java.util.Objects;

/**
 * A name-value pair that represents a breakdown of a metric.
 * For example, url=/vacancy, node=192.168.1.1, db_name=master.
 */
public class Tag extends Tags implements Comparable<Tag> {

  public static final String APP_TAG_NAME = "app";
  public static final String DATASOURCE_TAG_NAME = "datasource";

  public final String name;
  public final String value;
  // precomputed "name: value" form backing toString/compareTo/equals/hashCode
  private final String tag;

  public Tag(String name, String value) {
    this.name = name;
    this.value = value;
    this.tag = name + ": " + value;
  }

  @Override
  public String toString() {
    return tag;
  }

  @Override
  public int compareTo(Tag other) {
    return tag.compareTo(other.tag);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return tag.equals(((Tag) o).tag);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(tag);
  }

  @Override
  Tag[] getTags() {
    // a single tag is its own one-element tag set
    return new Tag[]{this};
  }
}
package ru.hh.nab.metrics;

import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;

/**
 * Tests for {@link CompactHistogram}: values are rounded up to the nearest multiple
 * of the compaction ratio before being counted.
 */
public class CompactHistogramTest {

  @Test
  public void testSave() {
    CompactHistogram histogram = new CompactHistogram(128, 10);
    // 0 -> 0
    histogram.save(0);
    // 4 -> 8
    histogram.save(4);
    // 6 -> 8
    histogram.save(6);
    // 7 -> 8
    histogram.save(7);
    // 8 -> 8
    histogram.save(8);
    // 100 -> 104
    histogram.save(100);

    Map<Integer, Integer> values = histogram.getValueToCountAndReset();
    assertEquals(3, values.size());
    // expected value goes first in JUnit's assertEquals; the original had these swapped
    assertEquals(1, values.get(0));
    assertEquals(4, values.get(8));
    assertEquals(1, values.get(104));
  }

  @Test
  public void testSaveWithCompactionRatio1() {
    CompactHistogram histogram = new CompactHistogram(1024, 1);
    int valuesCount = 1024;
    for (int value = 0; value < valuesCount; value++) {
      histogram.save(value);
    }

    Map<Integer, Integer> values = histogram.getValueToCountAndReset();
    assertEquals(valuesCount, values.size());
    for (int value = 0; value < valuesCount; value++) {
      assertEquals(1, values.get(value));
    }
  }
}
commitTransaction() { 26 | transactionManager.commit(transactionStatus); 27 | transactionStatus = null; 28 | } 29 | 30 | protected void rollBackTransaction() { 31 | transactionManager.rollback(transactionStatus); 32 | transactionStatus = null; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/extractor/NabJdbcOperationKindExtractor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.extractor; 2 | 3 | import io.opentelemetry.api.common.AttributeKey; 4 | import static io.opentelemetry.api.common.AttributeKey.stringKey; 5 | import io.opentelemetry.api.common.AttributesBuilder; 6 | import io.opentelemetry.context.Context; 7 | import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; 8 | import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; 9 | import jakarta.annotation.Nullable; 10 | import ru.hh.nab.telemetry.jdbc.internal.model.JdbcOperationKind; 11 | 12 | public class NabJdbcOperationKindExtractor implements AttributesExtractor { 13 | 14 | private static final AttributeKey JDBC_OPERATION_KIND_ATTRIBUTE_KEY = stringKey("jdbc.operation.kind"); 15 | 16 | private final JdbcOperationKind operationKind; 17 | 18 | public NabJdbcOperationKindExtractor(JdbcOperationKind operationKind) { 19 | this.operationKind = operationKind; 20 | } 21 | 22 | @Override 23 | public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { 24 | internalSet(attributes, JDBC_OPERATION_KIND_ATTRIBUTE_KEY, operationKind.name()); 25 | } 26 | 27 | @Override 28 | public void onEnd(AttributesBuilder attributes, Context context, REQUEST request, @Nullable Void unused, @Nullable Throwable error) {} 29 | } 30 | -------------------------------------------------------------------------------- 
package ru.hh.nab.kafka.consumer;

import java.util.List;
import static java.util.Objects.requireNonNull;
import java.util.StringJoiner;
import ru.hh.nab.metrics.Tag;
import static ru.hh.nab.metrics.Tag.APP_TAG_NAME;

/**
 * Immutable description of a kafka consumer: service name, topic and logical operation.
 * Provides the metric tags and the derived consumer group id.
 */
public class ConsumerMetadata {

  private final String serviceName;
  private final String topic;
  private final String operation;

  private final List<Tag> tags;

  public ConsumerMetadata(String serviceName, String topic, String operation) {
    this.serviceName = requireNonNull(serviceName, "serviceName is required");
    this.topic = requireNonNull(topic, "topic is required");
    // operation is optional; normalize null to an empty string
    this.operation = operation != null ? operation : "";
    this.tags = List.of(
        new Tag(APP_TAG_NAME, serviceName),
        new Tag("topic", topic),
        // use the normalized field, not the raw parameter: a null parameter
        // would otherwise produce a bogus "operation: null" tag
        new Tag("operation", this.operation)
    );
  }

  public String getServiceName() {
    return serviceName;
  }

  public String getTopic() {
    return topic;
  }

  /** Logical operation name; empty string when not specified. */
  public String getOperation() {
    return operation;
  }

  /** Fixed tag set ({@code app}, {@code topic}, {@code operation}) for metrics. */
  public List<Tag> toMetricTags() {
    return tags;
  }

  /** Consumer group id derived as {@code serviceName__topic__operation}. */
  public String getConsumerGroupId() {
    return new StringJoiner("__")
        .add(serviceName)
        .add(topic)
        .add(operation)
        .toString();
  }

}
package ru.hh.nab.kafka.consumer.retry;

import java.time.Instant;
import java.util.Objects;

/**
 * Message processing history. History is not meant to be created before first failure,
 * so all fields are required, and retryNumber should be greater than zero
 *
 * @param creationTime Time when this message was initially created
 * @param retryNumber  Number of failures to process this message
 * @param lastFailTime Time of last processing failure for this message
 */
public record MessageProcessingHistory(
    Instant creationTime,
    long retryNumber,
    Instant lastFailTime) {

  public MessageProcessingHistory {
    Objects.requireNonNull(creationTime, "creationTime is required");
    if (retryNumber < 1) {
      // message now matches the check (was: "should not be negative", which also allowed 0)
      throw new IllegalArgumentException("retryNumber should be greater than zero: " + retryNumber);
    }
    Objects.requireNonNull(lastFailTime, "lastFailTime is required");
  }

  /** History for the very first failure of a message. */
  public static MessageProcessingHistory initial(Instant creationTime, Instant lastFailTime) {
    return new MessageProcessingHistory(creationTime, 1, lastFailTime);
  }

  /** Copy of this history with the retry counter incremented and the fail time updated. */
  public MessageProcessingHistory withOneMoreFail(Instant failTime) {
    return new MessageProcessingHistory(creationTime, retryNumber + 1, failTime);
  }
}
JdbcAttributesGetter(); 11 | 12 | @Nullable 13 | @Override 14 | public String getSystem(NabDbRequest request) { 15 | return dbAttributesGetter.getSystem(request.getDbRequest()); 16 | } 17 | 18 | @Nullable 19 | @Override 20 | public String getUser(NabDbRequest request) { 21 | return dbAttributesGetter.getUser(request.getDbRequest()); 22 | } 23 | 24 | @Nullable 25 | @Override 26 | public String getName(NabDbRequest request) { 27 | return dbAttributesGetter.getName(request.getDbRequest()); 28 | } 29 | 30 | @Nullable 31 | @Override 32 | public String getConnectionString(NabDbRequest request) { 33 | return dbAttributesGetter.getConnectionString(request.getDbRequest()); 34 | } 35 | 36 | @Nullable 37 | @Override 38 | public String getRawStatement(NabDbRequest request) { 39 | return dbAttributesGetter.getRawStatement(request.getDbRequest()); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/extractor/DataSourceNameExtractor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.extractor; 2 | 3 | import io.opentelemetry.instrumentation.api.instrumenter.SpanNameExtractor; 4 | import io.opentelemetry.instrumentation.api.instrumenter.code.CodeAttributesGetter; 5 | import io.opentelemetry.instrumentation.api.instrumenter.code.CodeSpanNameExtractor; 6 | import javax.sql.DataSource; 7 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 8 | 9 | public class DataSourceNameExtractor implements SpanNameExtractor { 10 | 11 | private final static SpanNameExtractor DATA_SOURCE_SPAN_NAME_EXTRACTOR = 12 | CodeSpanNameExtractor.create(new DataSourceCodeAttributesGetter()); 13 | 14 | @Override 15 | public String extract(NabDataSourceInfo nabDataSourceInfo) { 16 | return nabDataSourceInfo 17 | .getDataSourceName() 18 | .orElseGet(() -> 
package ru.hh.nab.telemetry.jdbc.internal.extractor;

import io.opentelemetry.instrumentation.api.instrumenter.SpanNameExtractor;
import io.opentelemetry.instrumentation.api.instrumenter.code.CodeAttributesGetter;
import io.opentelemetry.instrumentation.api.instrumenter.code.CodeSpanNameExtractor;
import javax.sql.DataSource;
import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo;

/**
 * Span name for a data source: the explicitly configured name when present,
 * otherwise the simple class name of the underlying {@link DataSource}.
 */
public class DataSourceNameExtractor implements SpanNameExtractor<NabDataSourceInfo> {

  private static final SpanNameExtractor<DataSource> CLASS_NAME_EXTRACTOR =
      CodeSpanNameExtractor.create(new DataSourceCodeAttributesGetter());

  @Override
  public String extract(NabDataSourceInfo info) {
    return info
        .getDataSourceName()
        .orElseGet(() -> CLASS_NAME_EXTRACTOR.extract(info.getDataSource()));
  }

  private static class DataSourceCodeAttributesGetter implements CodeAttributesGetter<DataSource> {

    @Override
    public Class<?> getCodeClass(DataSource dataSource) {
      return dataSource.getClass();
    }

    @Override
    public String getMethodName(DataSource dataSource) {
      // force CodeSpanNameExtractor to emit only the class name, without a method part
      return null;
    }
  }
}
telemetry, configProvider.getKafkaClusterName(), configProvider.getServiceName()); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /nab-logging/src/test/java/ru/hh/nab/logging/ListAppenderTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.logging; 2 | 3 | import ch.qos.logback.classic.Logger; 4 | import ch.qos.logback.classic.LoggerContext; 5 | import org.junit.jupiter.api.AfterEach; 6 | import static org.junit.jupiter.api.Assertions.assertEquals; 7 | import org.junit.jupiter.api.BeforeEach; 8 | import org.junit.jupiter.api.Test; 9 | import org.slf4j.LoggerFactory; 10 | 11 | public class ListAppenderTest { 12 | private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(ListAppenderTest.class); 13 | 14 | private ListAppender listAppender; 15 | 16 | @BeforeEach 17 | public void setUp() { 18 | listAppender = new ListAppender(); 19 | listAppender.start(); 20 | 21 | LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory(); 22 | Logger rootLogger = loggerContext.getLogger(Logger.ROOT_LOGGER_NAME); 23 | rootLogger.addAppender(listAppender); 24 | } 25 | 26 | @AfterEach 27 | public void tearDown() { 28 | LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory(); 29 | Logger rootLogger = loggerContext.getLogger(Logger.ROOT_LOGGER_NAME); 30 | rootLogger.detachAppender(listAppender); 31 | 32 | listAppender.stop(); 33 | } 34 | 35 | @Test 36 | public void testName() { 37 | LOGGER.info("something important"); 38 | 39 | String actualLogLine = listAppender.getLogLineBySubstring("something"); 40 | assertEquals("something important", actualLogLine); 41 | } 42 | } 43 | 44 | -------------------------------------------------------------------------------- /nab-jclient/src/test/java/ru/hh/nab/jclient/JClientContextProviderFilterTest.java: -------------------------------------------------------------------------------- 
package ru.hh.nab.jclient;

import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import ru.hh.jclient.common.HttpClientContextThreadLocalSupplier;

public class JClientContextProviderFilterTest {

  /**
   * A request with an unparseable query string must be rejected with 400
   * and must never reach the rest of the filter chain.
   */
  @Test
  public void testInvalidQueryParams() throws IOException, ServletException {
    HttpServletRequest request = mock(HttpServletRequest.class);
    HttpServletResponse response = mock(HttpServletResponse.class);
    FilterChain chain = mock(FilterChain.class);
    when(request.getHeaderNames()).thenReturn(Collections.emptyEnumeration());
    when(request.getQueryString()).thenReturn("/query?%");

    JClientContextProviderFilter filter = new JClientContextProviderFilter(mock(HttpClientContextThreadLocalSupplier.class));
    filter.doFilter(request, response, chain);

    verify(response).sendError(HttpServletResponse.SC_BAD_REQUEST);
    verify(chain, never()).doFilter(any(), any());
  }
}
package ru.hh.nab.consul;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import ru.hh.consul.HealthClient;
import ru.hh.consul.option.ImmutableQueryOptions;
import ru.hh.consul.option.QueryOptions;

/**
 * Resolves healthy instances of a service by querying consul health checks
 * across all configured datacenters.
 */
public class ConsulFetcher implements HostsFetcher {

  private final HealthClient healthClient;
  private final String serviceName;
  private final List<String> datacenters;

  public ConsulFetcher(HealthClient healthClient, String serviceName, List<String> datacenters) {
    this.healthClient = healthClient;
    // the name of THIS service, sent to consul as the caller of the query
    this.serviceName = serviceName;
    this.datacenters = datacenters;
  }

  @Override
  public Set<HostPort> fetchHostsByName(String serviceName) {
    return datacenters
        .stream()
        .flatMap(datacenter -> healthClient
            .getHealthyServiceInstances(serviceName, buildQueryOptions(datacenter))
            .getResponse()
            .stream()
            .map(instance -> new HostPort(instance.getActualServiceAddress(), instance.getService().getPort()))
        )
        .collect(Collectors.toSet());
  }

  private QueryOptions buildQueryOptions(String datacenter) {
    return ImmutableQueryOptions
        .builder()
        .datacenter(datacenter.toLowerCase())
        .caller(serviceName)
        .build();
  }
}
package ru.hh.nab.hibernate.events;

import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.spi.PersistenceUnitInfo;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import ru.hh.nab.jpa.EntityManagerFactoryCreationHandler;

/**
 * After an EntityManagerFactory is created, hands hibernate's {@link EventListenerRegistry}
 * to every registered {@link EventListenerRegistryConsumer}.
 */
public class EventListenerRegistryPropagator implements EntityManagerFactoryCreationHandler {

  private final List<EventListenerRegistryConsumer> eventListenerRegistryConsumers;

  public EventListenerRegistryPropagator(List<EventListenerRegistryConsumer> eventListenerRegistryConsumers) {
    this.eventListenerRegistryConsumers = eventListenerRegistryConsumers;
  }

  @Override
  public void accept(EntityManagerFactory entityManagerFactory, PersistenceUnitInfo persistenceUnitInfo) {
    if (eventListenerRegistryConsumers.isEmpty()) {
      // nothing to propagate — avoid unwrapping the session factory at all
      return;
    }
    SessionFactoryImplementor sessionFactory = entityManagerFactory.unwrap(SessionFactoryImplementor.class);
    ServiceRegistryImplementor serviceRegistry = sessionFactory.getServiceRegistry();
    EventListenerRegistry eventListenerRegistry = serviceRegistry.getService(EventListenerRegistry.class);
    eventListenerRegistryConsumers.forEach(consumer -> consumer.accept(eventListenerRegistry));
  }
}
public void testNoCache() { 33 | HttpServletResponse response = new MockHttpServletResponse(); 34 | 35 | CacheUtils.noCache(response); 36 | 37 | assertEquals("must-revalidate,no-cache,no-store", response.getHeader(CacheUtils.CACHE_CONTROL)); 38 | assertEquals(DateGenerator.formatDate(1), response.getHeader(CacheUtils.EXPIRES)); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/jersey/resolver/variants/PartiallyOverflowingCacheWithSizeCache.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver.variants; 2 | 3 | import java.util.concurrent.ConcurrentHashMap; 4 | import java.util.function.Function; 5 | import org.springframework.util.ConcurrentReferenceHashMap; 6 | 7 | public class PartiallyOverflowingCacheWithSizeCache implements GenericCache { 8 | private final ConcurrentHashMap strongStorage = new ConcurrentHashMap<>(); 9 | private final ConcurrentReferenceHashMap weakStorage = new ConcurrentReferenceHashMap<>(16, 0.75f, 1, 10 | ConcurrentReferenceHashMap.ReferenceType.SOFT); 11 | private final int strongStorageMaxSize; 12 | private boolean strongStorageOverloaded = false; 13 | 14 | public PartiallyOverflowingCacheWithSizeCache(int strongStorageMaxSize) { 15 | this.strongStorageMaxSize = strongStorageMaxSize; 16 | } 17 | 18 | public int getStorageSize() { 19 | return strongStorage.size() + weakStorage.size(); 20 | } 21 | 22 | public V computeIfAbsent(K key, Function mappingFunction) { 23 | if (!strongStorageOverloaded && strongStorage.mappingCount() < strongStorageMaxSize) { 24 | return strongStorage.computeIfAbsent(key, mappingFunction); 25 | } 26 | 27 | if (!strongStorageOverloaded) { 28 | strongStorageOverloaded = true; 29 | } 30 | 31 | V value = strongStorage.get(key); 32 | 33 | if (value != null) { 34 | return value; 35 | } 36 | 37 | return weakStorage.computeIfAbsent(key, mappingFunction); 38 | } 39 
| } 40 | -------------------------------------------------------------------------------- /nab-consul/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | ru.hh.nab 6 | nuts-and-bolts-parent 7 | 22.0.5-SNAPSHOT 8 | ../pom.xml 9 | 10 | 11 | nab-consul 12 | jar 13 | 14 | nuts'n'bolts consul 15 | 16 | 17 | 18 | ru.hh.consul 19 | consul-client 20 | 21 | 22 | ru.hh.nab 23 | nab-common 24 | 25 | 26 | ru.hh.nab 27 | nab-metrics 28 | 29 | 30 | 31 | 32 | org.junit.jupiter 33 | junit-jupiter-engine 34 | test 35 | 36 | 37 | org.mockito 38 | mockito-core 39 | test 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/servlet/filter/CommonHeadersFilter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.servlet.filter; 2 | 3 | import jakarta.servlet.FilterChain; 4 | import jakarta.servlet.ServletException; 5 | import jakarta.servlet.http.HttpServletRequest; 6 | import jakarta.servlet.http.HttpServletResponse; 7 | import java.io.IOException; 8 | import static java.util.Optional.ofNullable; 9 | import org.springframework.web.filter.OncePerRequestFilter; 10 | import static ru.hh.jclient.common.HttpHeaderNames.X_OUTER_TIMEOUT_MS; 11 | import ru.hh.nab.common.constants.RequestHeaders; 12 | import ru.hh.nab.web.http.RequestContext; 13 | 14 | public final class CommonHeadersFilter extends OncePerRequestFilter { 15 | 16 | @Override 17 | protected void doFilterInternal( 18 | HttpServletRequest request, 19 | HttpServletResponse response, 20 | FilterChain filterChain 21 | ) throws ServletException, IOException { 22 | 23 | var source = request.getHeader(RequestHeaders.REQUEST_SOURCE); 24 | var isLoadTesting = request.getHeader(RequestHeaders.LOAD_TESTING) != null; 25 | var outerTimeoutMs = request.getHeader(X_OUTER_TIMEOUT_MS); 26 | 27 | try { 28 | 
package ru.hh.nab.web.servlet.filter;

import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import static java.util.Optional.ofNullable;
import org.springframework.web.filter.OncePerRequestFilter;
import static ru.hh.jclient.common.HttpHeaderNames.X_OUTER_TIMEOUT_MS;
import ru.hh.nab.common.constants.RequestHeaders;
import ru.hh.nab.web.http.RequestContext;

/**
 * Copies common request headers (request source, load-testing flag, outer timeout)
 * into the thread-bound {@link RequestContext} for the duration of the request,
 * and clears them afterwards.
 */
public final class CommonHeadersFilter extends OncePerRequestFilter {

  @Override
  protected void doFilterInternal(
      HttpServletRequest request,
      HttpServletResponse response,
      FilterChain filterChain
  ) throws ServletException, IOException {

    var source = request.getHeader(RequestHeaders.REQUEST_SOURCE);
    var isLoadTesting = request.getHeader(RequestHeaders.LOAD_TESTING) != null;
    var outerTimeoutMs = request.getHeader(X_OUTER_TIMEOUT_MS);

    try {
      RequestContext.setRequestSource(source);
      RequestContext.setLoadTesting(isLoadTesting);
      RequestContext.setOuterTimeoutMs(ofNullable(outerTimeoutMs).map(CommonHeadersFilter::parseLongOrNull).orElse(null));

      filterChain.doFilter(request, response);

    } finally {
      // always clear the thread-bound context, even if the chain threw
      RequestContext.clearLoadTesting();
      RequestContext.clearRequestSource();
      RequestContext.clearOuterTimeout();
    }
  }

  /**
   * Parses the client-supplied timeout header defensively: a malformed value is
   * treated as absent instead of letting NumberFormatException fail the request.
   */
  private static Long parseLongOrNull(String value) {
    try {
      return Long.valueOf(value);
    } catch (NumberFormatException e) {
      return null;
    }
  }
}
package ru.hh.nab.kafka.consumer;

import java.time.Duration;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.common.PartitionInfo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;

public class TopicPartitionsMonitoringTest extends KafkaConsumerTestBase {

  /**
   * The partitions-change callback must fire once the topic grows from 5 to 7 partitions.
   * (Method renamed from the misspelled "testSubsceribeForPartitionsChanges".)
   */
  @Test
  public void testSubscribeForPartitionsChanges() throws InterruptedException, ExecutionException {
    DefaultConsumerFactory defaultConsumerFactory = (DefaultConsumerFactory) consumerFactory;
    ClusterMetadataProvider clusterMetadataProvider = new ClusterMetadataProvider(defaultConsumerFactory);
    TopicPartitionsMonitoring topicPartitionsMonitoring = new TopicPartitionsMonitoring(clusterMetadataProvider);

    List<PartitionInfo> initialPartitions = clusterMetadataProvider.getPartitionsInfo(topicName);
    assertEquals(5, initialPartitions.size());

    CountDownLatch latch = new CountDownLatch(1);
    topicPartitionsMonitoring.subscribeOnPartitionsChange(topicName, Duration.ofSeconds(1), initialPartitions, (newPartitions) -> {
      if (newPartitions.size() == 7) {
        latch.countDown();
      }
    });

    addPartitions(topicName, 7);

    assertTrue(latch.await(5, TimeUnit.SECONDS));
  }

}
package ru.hh.nab.web;

import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.jersey.JerseyAutoConfiguration;
import org.springframework.boot.autoconfigure.web.servlet.ServletWebServerFactoryAutoConfiguration;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import ru.hh.nab.web.resource.StatusResource;

/** Test configuration exposing a standalone /status servlet. */
@Configuration
@ImportAutoConfiguration({
    ServletWebServerFactoryAutoConfiguration.class,
    JerseyAutoConfiguration.class
})
public class NabWebTestConfig {

  public static final String TEST_SERVICE_NAME = "testService";
  public static final String TEST_SERVICE_VERSION = "test-version";

  /**
   * Registers the status resource under /status with an uptime supplier that
   * pretends the service started five seconds before context creation.
   */
  @Bean
  public ServletRegistrationBean<ServletContainer> statusServlet() {
    Instant startedAt = Instant.now().minus(5L, ChronoUnit.SECONDS);
    StatusResource statusResource = new StatusResource(
        TEST_SERVICE_NAME,
        TEST_SERVICE_VERSION,
        () -> Duration.between(startedAt, Instant.now())
    );
    ResourceConfig resourceConfig = new ResourceConfig().register(statusResource);
    return new ServletRegistrationBean<>(new ServletContainer(resourceConfig), "/status");
  }
}
mappingFunction) { 24 | if (!strongStorageOverloaded.get() && strongStorage.mappingCount() < strongStorageMaxSize) { 25 | return strongStorage.computeIfAbsent(key, mappingFunction); 26 | } 27 | 28 | strongStorageOverloaded.compareAndSet(false, true); 29 | 30 | V value = strongStorage.get(key); 31 | 32 | if (value != null) { 33 | return value; 34 | } 35 | 36 | return weakStorage.computeIfAbsent(key, mappingFunction); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /nab-hibernate/src/main/java/ru/hh/nab/hibernate/adapter/NabHibernatePersistenceProvider.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.adapter; 2 | 3 | import jakarta.persistence.EntityManagerFactory; 4 | import jakarta.persistence.spi.PersistenceUnitInfo; 5 | import java.util.List; 6 | import java.util.Map; 7 | import org.hibernate.boot.registry.BootstrapServiceRegistry; 8 | import org.hibernate.boot.registry.StandardServiceRegistryBuilder; 9 | import org.hibernate.jpa.HibernatePersistenceProvider; 10 | import org.hibernate.jpa.boot.internal.EntityManagerFactoryBuilderImpl; 11 | import org.hibernate.jpa.boot.internal.PersistenceUnitInfoDescriptor; 12 | import org.hibernate.service.spi.ServiceContributor; 13 | 14 | public class NabHibernatePersistenceProvider extends HibernatePersistenceProvider { 15 | 16 | private final List serviceContributors; 17 | 18 | public NabHibernatePersistenceProvider(List serviceContributors) { 19 | this.serviceContributors = serviceContributors; 20 | } 21 | 22 | @Override 23 | public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map properties) { 24 | return new EntityManagerFactoryBuilderImpl(new PersistenceUnitInfoDescriptor(info), properties) { 25 | @Override 26 | protected StandardServiceRegistryBuilder getStandardServiceRegistryBuilder(BootstrapServiceRegistry bsr) { 27 | StandardServiceRegistryBuilder ssrBuilder 
/**
 * An aggregator that accumulates a stream of values as a histogram to compute percentiles.
 * For example, response times.
 * <p>
 * Uses uniform reservoir sampling (Vitter's Algorithm R): once the backing array is full,
 * each new value replaces a random slot with probability {@code length / count}, so the
 * retained sample stays statistically representative of the whole stream between resets.
 * Thread-safe.
 */
public class UniformHistogram {
  // Total number of observations since the last reset.
  // AtomicLong rather than AtomicInteger: an int counter overflows to a negative value after
  // 2^31 saves between resets, and ThreadLocalRandom.nextLong(bound) then throws
  // IllegalArgumentException (bound must be positive).
  private final AtomicLong count = new AtomicLong();
  private final AtomicLongArray values;

  /**
   * @param maxHistogramSize an upper limit on the number of stored metric values.
   *                         If there are too many different values we can consume too much memory.
   *                         To prevent this, when maxHistogramSize is reached, values are sampled.
   */
  public UniformHistogram(int maxHistogramSize) {
    this.values = new AtomicLongArray(maxHistogramSize);
  }

  /**
   * Records a single observation.
   */
  public void save(long value) {
    final long c = count.incrementAndGet();
    if (c <= values.length()) {
      // Reservoir not yet full - store sequentially.
      values.set((int) c - 1, value);
    } else {
      // Reservoir full - overwrite a random slot with probability length/c.
      final long r = ThreadLocalRandom.current().nextLong(c);
      if (r < values.length()) {
        values.set((int) r, value);
      }
    }
  }

  /**
   * Returns the sampled values accumulated since the previous reset and resets the histogram.
   *
   * @return at most {@code maxHistogramSize} values; fewer if fewer were saved
   */
  public long[] getValuesAndReset() {
    int size = values.length();
    long c = count.getAndSet(0);
    if (c < size) {
      size = (int) c;
    }

    long[] data = new long[size];
    for (int i = 0; i < size; i++) {
      data[i] = values.get(i);
    }

    return data;
  }
}
setDataSource(DataSource dataSource) { 26 | this.dataSource = dataSource; 27 | return this; 28 | } 29 | 30 | public Optional getDataSourceName() { 31 | return Optional.ofNullable(dataSourceName); 32 | } 33 | 34 | public NabDataSourceInfo setDataSourceName(@Nullable String dataSourceName) { 35 | this.dataSourceName = dataSourceName; 36 | return this; 37 | } 38 | 39 | public Optional isWritableDataSource() { 40 | return Optional.ofNullable(writableDataSource); 41 | } 42 | 43 | public NabDataSourceInfo setWritableDataSource(@Nullable Boolean writableDataSource) { 44 | this.writableDataSource = writableDataSource; 45 | return this; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/consumer/PartialAck.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.consumer; 2 | 3 | import java.util.Collection; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | 6 | class PartialAck implements Ack { 7 | private final Ack delegate; 8 | private final Collection> messagesReadyForAcknowledge; 9 | 10 | public PartialAck(Ack delegate, Collection> messagesReadyForAcknowledge) { 11 | this.delegate = delegate; 12 | this.messagesReadyForAcknowledge = messagesReadyForAcknowledge; 13 | } 14 | 15 | @Override 16 | public void acknowledge() { 17 | delegate.acknowledge(messagesReadyForAcknowledge); 18 | } 19 | 20 | @Override 21 | public void acknowledge(ConsumerRecord message) { 22 | delegate.acknowledge(message); 23 | } 24 | 25 | @Override 26 | public void sendToDlq(ConsumerRecord message) { 27 | delegate.sendToDlq(message); 28 | } 29 | 30 | @Override 31 | public void acknowledge(Collection> messages) { 32 | delegate.acknowledge(messages); 33 | } 34 | 35 | @Override 36 | public void sendToDlq(Collection> messages) { 37 | delegate.sendToDlq(messages); 38 | } 39 | 40 | @Override 41 | public void seek(ConsumerRecord message) { 42 | 
delegate.seek(message); 43 | } 44 | 45 | @Override 46 | public void commit(Collection> messages) { 47 | delegate.commit(messages); 48 | } 49 | 50 | @Override 51 | public void retry(ConsumerRecord message, Throwable error) { 52 | delegate.retry(message, error); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /nab-telemetry-jdbc/src/main/java/ru/hh/nab/telemetry/jdbc/internal/extractor/NabDataSourceInfoExtractor.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.telemetry.jdbc.internal.extractor; 2 | 3 | import io.opentelemetry.api.common.AttributesBuilder; 4 | import io.opentelemetry.context.Context; 5 | import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; 6 | import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; 7 | import jakarta.annotation.Nullable; 8 | import java.util.function.Function; 9 | import ru.hh.nab.telemetry.jdbc.internal.model.NabDataSourceInfo; 10 | 11 | public class NabDataSourceInfoExtractor implements AttributesExtractor { 12 | 13 | private final Function nabDataSourceInfoGetter; 14 | 15 | public NabDataSourceInfoExtractor(Function nabDataSourceInfoGetter) { 16 | this.nabDataSourceInfoGetter = nabDataSourceInfoGetter; 17 | } 18 | 19 | @Override 20 | public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { 21 | nabDataSourceInfoGetter 22 | .apply(request) 23 | .getDataSourceName() 24 | .ifPresent(dataSourceName -> internalSet(attributes, NabDataSourceInfo.DATASOURCE_NAME_ATTRIBUTE_KEY, dataSourceName)); 25 | nabDataSourceInfoGetter 26 | .apply(request) 27 | .isWritableDataSource() 28 | .ifPresent(writable -> internalSet(attributes, NabDataSourceInfo.DATASOURCE_WRITABLE_ATTRIBUTE_KEY, writable)); 29 | } 30 | 31 | @Override 32 | public void onEnd(AttributesBuilder attributes, Context context, REQUEST request, @Nullable Void response, 
@Nullable Throwable error) {} 33 | } 34 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/resolver/PartiallyOverflowingCache.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | import java.util.concurrent.ConcurrentHashMap; 4 | import java.util.function.Function; 5 | import org.springframework.util.ConcurrentReferenceHashMap; 6 | 7 | class PartiallyOverflowingCache { 8 | private final ConcurrentHashMap strongStorage = new ConcurrentHashMap<>(); 9 | private final ConcurrentReferenceHashMap weakStorage = new ConcurrentReferenceHashMap<>(16, 0.75f, 1, 10 | ConcurrentReferenceHashMap.ReferenceType.SOFT); 11 | private final int strongStorageMaxSize; 12 | 13 | PartiallyOverflowingCache(int strongStorageMaxSize) { 14 | this.strongStorageMaxSize = strongStorageMaxSize; 15 | } 16 | 17 | int getStorageSize() { 18 | return strongStorage.size() + weakStorage.size(); 19 | } 20 | 21 | /** 22 | * Может позволить себе допустить гонки потому что: 23 | * а) количество лишних записей не будет превышать количество потоков, одновременно записывающих разные ключи 24 | * б) городить на этом этапе дополнительную синхронизацию кажется избыточным, т.к. переполнение это "аварийный" режим работы кэша. 
25 | * jmh test {@link ru.hh.nab.web.jersey.resolver.PartiallyOverflowingCachePerformanceTest} 26 | */ 27 | V computeIfAbsent(K key, Function mappingFunction) { 28 | if (strongStorage.mappingCount() < strongStorageMaxSize) { 29 | return strongStorage.computeIfAbsent(key, mappingFunction); 30 | } 31 | 32 | V value = strongStorage.get(key); 33 | 34 | if (value != null) { 35 | return value; 36 | } 37 | 38 | return weakStorage.computeIfAbsent(key, mappingFunction); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /nab-common/src/test/java/ru/hh/nab/common/util/ExceptionUtilsTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.common.util; 2 | 3 | import static org.junit.jupiter.api.Assertions.assertSame; 4 | import static org.junit.jupiter.api.Assertions.assertThrows; 5 | import static org.junit.jupiter.api.Assertions.assertTrue; 6 | import org.junit.jupiter.api.Test; 7 | 8 | public class ExceptionUtilsTest { 9 | 10 | @Test 11 | public void testGetOrThrowException() { 12 | var initialException = new Exception(); 13 | var thrownException = throwWithGetOrThrow(initialException); 14 | 15 | assertSame(initialException, thrownException.getCause()); 16 | } 17 | 18 | @Test 19 | public void testGetOrThrowInterruptedException() { 20 | var initialException = new InterruptedException(); 21 | var thrownException = throwWithGetOrThrow(initialException); 22 | 23 | assertTrue(Thread.currentThread().isInterrupted()); 24 | assertSame(initialException, thrownException.getCause()); 25 | } 26 | 27 | @Test 28 | public void testGetOrThrowRuntimeException() { 29 | var initialException = new RuntimeException(); 30 | var thrownException = throwWithGetOrThrow(initialException); 31 | 32 | assertSame(initialException, thrownException); 33 | } 34 | 35 | @Test 36 | public void testGetOrThrowCheckedMapping() { 37 | assertThrows(IllegalArgumentException.class, () -> 
ExceptionUtils.getOrThrow(() -> { 38 | throw new Exception(); 39 | }, IllegalArgumentException::new)); 40 | } 41 | 42 | private static RuntimeException throwWithGetOrThrow(Exception initialException) { 43 | return assertThrows(RuntimeException.class, () -> ExceptionUtils.getOrThrow(() -> { 44 | throw initialException; 45 | })); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /nab-kafka/src/main/java/ru/hh/nab/kafka/producer/DefaultKafkaProducer.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.kafka.producer; 2 | 3 | import java.util.concurrent.CompletableFuture; 4 | import java.util.concurrent.Executor; 5 | import java.util.function.Function; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.springframework.kafka.core.KafkaTemplate; 8 | import org.springframework.kafka.support.SendResult; 9 | 10 | public class DefaultKafkaProducer extends KafkaProducer { 11 | 12 | private final KafkaTemplate kafkaTemplate; 13 | 14 | DefaultKafkaProducer(KafkaTemplate kafkaTemplate) { 15 | this.kafkaTemplate = kafkaTemplate; 16 | } 17 | 18 | @Override 19 | @SuppressWarnings("unchecked") 20 | public CompletableFuture> sendMessage(ProducerRecord record, Executor executor) { 21 | return CompletableFuture 22 | .supplyAsync(() -> kafkaTemplate.send((ProducerRecord) record), executor) 23 | .thenCompose(Function.identity()) 24 | .thenApply(this::convertSpringSendResult); 25 | } 26 | 27 | private KafkaSendResult convertSpringSendResult(SendResult springResult) { 28 | return new KafkaSendResult<>( 29 | convertProducerRecord(springResult.getProducerRecord()), 30 | springResult.getRecordMetadata() 31 | ); 32 | } 33 | 34 | private ProducerRecord convertProducerRecord(ProducerRecord initial) { 35 | return new ProducerRecord<>( 36 | initial.topic(), 37 | initial.partition(), 38 | initial.timestamp(), 39 | initial.key(), 40 | (T) initial.value(), 41 | 
/**
 * {@link DefaultKafkaConsumerFactory} that verifies topic availability at consumer creation
 * time instead of failing later during polling: creating a consumer throws immediately when
 * the topic is missing or the caller is not authorized to access it.
 */
class FailFastDefaultKafkaConsumerFactory extends DefaultKafkaConsumerFactory {

  // Topic whose existence/accessibility is checked for every created consumer.
  private final String topicName;


  public FailFastDefaultKafkaConsumerFactory(
      String topicName,
      Map configs,
      Deserializer keyDeserializer,
      Deserializer valueDeserializer,
      Supplier bootstrapServersSupplier
  ) {
    super(configs, keyDeserializer, valueDeserializer);
    this.topicName = topicName;
    this.setBootstrapServersSupplier(bootstrapServersSupplier);
  }

  /**
   * Creates the consumer and probes the topic's partition metadata as a liveness/authorization
   * check before handing the consumer out.
   */
  @Override
  protected Consumer createKafkaConsumer(String groupId, String clientIdPrefix, String clientIdSuffixArg, Properties properties) {
    Consumer kafkaConsumer = super.createKafkaConsumer(groupId, clientIdPrefix, clientIdSuffixArg, properties);
    List partitions = kafkaConsumer.partitionsFor(topicName); // fail if user is not authorized to access topic
    if (partitions == null || partitions.isEmpty()) {
      // fail if topic does not exist
      throw new IllegalStateException(String.format("Failed to find any partition for topic %s", topicName));
    }
    return kafkaConsumer;
  }
}
/nab-metrics/src/main/java/ru/hh/nab/metrics/executor/ThreadDiagnosticRejectedExecutionHandler.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.metrics.executor; 2 | 3 | import java.time.Duration; 4 | import java.util.concurrent.RejectedExecutionException; 5 | import java.util.concurrent.RejectedExecutionHandler; 6 | import java.util.concurrent.ThreadPoolExecutor; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import ru.hh.nab.common.util.ThreadDumpWriter; 10 | 11 | public class ThreadDiagnosticRejectedExecutionHandler implements RejectedExecutionHandler { 12 | 13 | private static final Logger LOGGER = LoggerFactory.getLogger(ThreadDiagnosticRejectedExecutionHandler.class); 14 | private static final int THREAD_DUMP_PRINTING_MIN_DELAY_IN_MINUTES = 5; 15 | private static final int THREAD_DUMP_LINES_IN_LOG_MESSAGE = 500; 16 | 17 | private static final ThreadDumpWriter THREAD_DUMP_WRITER = new ThreadDumpWriter("RejectedExecutionDiagnostic", 18 | THREAD_DUMP_LINES_IN_LOG_MESSAGE, Duration.ofMinutes(THREAD_DUMP_PRINTING_MIN_DELAY_IN_MINUTES).toMillis()); 19 | 20 | @Override 21 | public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { 22 | String threadPoolPhrase; 23 | if (executor instanceof MonitoredThreadPoolExecutor) { 24 | threadPoolPhrase = ((MonitoredThreadPoolExecutor) executor).getThreadPoolName() + " thread pool"; 25 | } else { 26 | threadPoolPhrase = "Thread pool"; 27 | } 28 | LOGGER.warn("{} is low on threads: size={}, activeCount={}, queueSize={}", 29 | threadPoolPhrase, executor.getPoolSize(), executor.getActiveCount(), executor.getQueue().size()); 30 | 31 | THREAD_DUMP_WRITER.tryDumpThreads(); 32 | 33 | throw new RejectedExecutionException(threadPoolPhrase + " is low on threads"); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- 
/nab-hibernate/src/test/java/ru/hh/nab/hibernate/interceptor/ControllerPassingInterceptorTest.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.hibernate.interceptor; 2 | 3 | import org.junit.jupiter.api.AfterEach; 4 | import static org.junit.jupiter.api.Assertions.assertEquals; 5 | import static org.junit.jupiter.api.Assertions.assertFalse; 6 | import org.junit.jupiter.api.Test; 7 | import ru.hh.nab.common.mdc.MDC; 8 | 9 | public class ControllerPassingInterceptorTest { 10 | private static final ControllerPassingInterceptor controllerPassingInterceptor = new ControllerPassingInterceptor(); 11 | 12 | @AfterEach 13 | public void tearDown() { 14 | MDC.clearController(); 15 | } 16 | 17 | @Test 18 | public void controllerExistShouldReturnWithComment() { 19 | MDC.setController("resume"); 20 | 21 | String originalSql = "select * from resume;"; 22 | 23 | String sqlAfterPrepareStatement = controllerPassingInterceptor.inspect(originalSql); 24 | 25 | assertEquals("/* resume */" + originalSql, sqlAfterPrepareStatement); 26 | } 27 | 28 | @Test 29 | public void controllerExistAndHasStarShouldReturnWithComment() { 30 | MDC.setController("resume*"); 31 | 32 | String originalSql = "select * from resume;"; 33 | 34 | String sqlAfterPrepareStatement = controllerPassingInterceptor.inspect(originalSql); 35 | 36 | assertEquals("/* resume_ */" + originalSql, sqlAfterPrepareStatement); 37 | } 38 | 39 | @Test 40 | public void controllerDoesNotExistShouldReturnWithoutComment() { 41 | assertFalse(MDC.getController().isPresent()); 42 | 43 | String originalSql = "select * from resume;"; 44 | 45 | String sqlAfterPrepareStatement = controllerPassingInterceptor.inspect(originalSql); 46 | 47 | assertEquals(originalSql, sqlAfterPrepareStatement); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /nab-web/src/test/java/ru/hh/nab/web/resource/StatusResourceTest.java: 
/**
 * Integration test for {@link StatusResource}: boots the application on a random port and
 * checks the payload returned by GET /status.
 */
@SpringBootTest(classes = NabWebTestConfig.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class StatusResourceTest {

  @Inject
  private TestRestTemplate testRestTemplate;

  @Test
  public void testStatusResponse() {
    ResponseEntity response = testRestTemplate.getForEntity("/status", Project.class);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatusCode().value());
    Project project = response.getBody();
    assertEquals(TEST_SERVICE_NAME, project.name);
    assertEquals(TEST_SERVICE_VERSION, project.version);
    // NabWebTestConfig shifts the start instant 5 seconds into the past, so uptime is positive.
    assertTrue(project.uptime > 0);
  }

  // JAXB view of the /status payload; fields are bound directly, no accessors needed.
  @XmlRootElement
  private static final class Project {
    @XmlAttribute
    private String name;
    @XmlElement
    private String version;
    @XmlElement
    private long uptime;
  }
}
/**
 * Integration tests for consumers built by the Kafka consumer factory: messages published
 * to the topic must reach the registered consumer callback.
 */
public class KafkaConsumerFactoryTest extends KafkaConsumerTestBase {
  private TopicConsumerMock consumerMock;
  private KafkaConsumer consumer;

  @BeforeEach
  public void setUp() {
    consumerMock = new TopicConsumerMock<>();
  }

  @AfterEach
  public void tearDown() {
    // Stop the consumer so it does not leak into subsequent tests.
    consumer.stop();
  }

  @Test
  public void shouldReceiveSingleMessageFromTopic() {
    consumer = startMessagesConsumer(String.class, consumerMock);

    String payload = "it's test message";
    kafkaTestUtils.sendMessage(topicName, payload);

    // Delivery is asynchronous - poll until the message shows up.
    await()
        .atMost(5, TimeUnit.SECONDS)
        .untilAsserted(() -> consumerMock.assertMessagesEquals(List.of(payload)));
  }

  @Test
  public void shouldReceiveMessageByMessageFromTopic() {
    consumer = startMessagesConsumer(String.class, consumerMock);

    // First message must be delivered on its own...
    String firstMessage = "1";
    kafkaTestUtils.sendMessage(topicName, firstMessage);
    await()
        .atMost(5, TimeUnit.SECONDS)
        .untilAsserted(() -> consumerMock.assertMessagesEquals(List.of(firstMessage)));

    // ...and the second delivery must contain only the new message.
    String secondMessage = "2";
    kafkaTestUtils.sendMessage(topicName, secondMessage);
    await()
        .atMost(5, TimeUnit.SECONDS)
        .untilAsserted(() -> consumerMock.assertMessagesEquals(List.of(secondMessage)));
  }
}
/**
 * Controls how the entityManager interacts with the second-level cache or query cache.
 * An instance of DataSourceCacheMode may be viewed as packaging a JPA-defined CacheStoreMode with a CacheRetrieveMode.
 */
public enum DataSourceCacheMode {

  /**
   * NORMAL represents the combination (CacheStoreMode.USE, CacheRetrieveMode.USE)
   * The entityManager may read items from the cache, and add items to the cache as it reads them from the database.
   */
  NORMAL,

  /**
   * IGNORE represents the combination (CacheStoreMode.BYPASS, CacheRetrieveMode.BYPASS)
   * The entityManager will never interact with the cache, except to invalidate cached items when updates occur.
   */
  IGNORE,

  /**
   * GET represents the combination (CacheStoreMode.BYPASS, CacheRetrieveMode.USE)
   * The entityManager may read items from the cache, but will not add items, except to invalidate items when updates occur.
   */
  GET,

  /**
   * PUT represents the combination (CacheStoreMode.USE, CacheRetrieveMode.BYPASS)
   * The entityManager will never read items from the cache, but will add items to the cache as it reads them from the database.
   * EntityManager does not force refresh of already cached items when reading from database.
   */
  PUT,

  /**
   * REFRESH represents the combination (CacheStoreMode.REFRESH, CacheRetrieveMode.BYPASS)
   * As with PUT, the entityManager will never read items from the cache, but will add items to the cache as it reads them from the database.
   * EntityManager forces refresh of cache for items read from database.
   */
  REFRESH,
}
11 | * Insert between driver datasource and pooling datasource, so that statement timeout query is executed only once 12 | * and all subsequent queries are bounded by the timeout.
13 | * Do not use in front of other connection pools such as PgBouncer, because the timeout can affect other applications that share the same PgBouncer. 14 | */ 15 | public class StatementTimeoutDataSource extends DelegatingDataSource { 16 | 17 | private final String setStatementTimeoutQuery; 18 | 19 | public StatementTimeoutDataSource(DataSource delegate, int statementTimeoutMs) { 20 | super(delegate); 21 | this.setStatementTimeoutQuery = "SET STATEMENT_TIMEOUT TO " + statementTimeoutMs; 22 | } 23 | 24 | @Override 25 | public Connection getConnection() throws SQLException { 26 | Connection connection = super.getConnection(); 27 | setStatementTimeout(connection); 28 | return connection; 29 | } 30 | 31 | @Override 32 | public Connection getConnection(String username, String password) throws SQLException { 33 | Connection connection = super.getConnection(username, password); 34 | setStatementTimeout(connection); 35 | return connection; 36 | } 37 | 38 | private void setStatementTimeout(Connection connection) throws SQLException { 39 | try (Statement statement = connection.createStatement()) { 40 | statement.execute(setStatementTimeoutQuery); 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /nab-common/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | ru.hh.nab 6 | nuts-and-bolts-parent 7 | 22.0.5-SNAPSHOT 8 | ../pom.xml 9 | 10 | 11 | nab-common 12 | jar 13 | 14 | nuts'n'bolts common utils 15 | 16 | 17 | 18 | ru.hh.trace 19 | trace-context 20 | 2.0.1 21 | 22 | 23 | jakarta.inject 24 | jakarta.inject-api 25 | 26 | 27 | 28 | 29 | org.slf4j 30 | slf4j-api 31 | 32 | 33 | 34 | 35 | org.junit.jupiter 36 | junit-jupiter-engine 37 | test 38 | 39 | 40 | org.mockito 41 | mockito-core 42 | test 43 | 44 | 45 | ch.qos.logback 46 | logback-classic 47 | test 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- 
/nab-web/src/main/java/ru/hh/nab/web/jersey/resolver/XmlEscapeHandler.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.resolver; 2 | 3 | import java.io.IOException; 4 | import java.io.Writer; 5 | import org.glassfish.jaxb.core.marshaller.CharacterEscapeHandler; 6 | 7 | /** 8 | * Copy of {@link com.sun.xml.bind.marshaller.MinimumEscapeHandler}. 9 | * Also replaces invalid text symbols. 10 | */ 11 | public class XmlEscapeHandler implements CharacterEscapeHandler { 12 | public static final CharacterEscapeHandler INSTANCE = new XmlEscapeHandler(); 13 | 14 | private XmlEscapeHandler() {} 15 | 16 | public void escape(char[] ch, int start, int length, boolean isAttVal, Writer out) throws IOException { 17 | int limit = start + length; 18 | for (int i = start; i < limit; i++) { 19 | char c = ch[i]; 20 | if (c == '&' || c == '<' || c == '>' || c == '\r' || (c == '\n' && isAttVal) || (c == '\"' && isAttVal) || 21 | CharacterEscapeBase.isInvalidTextSymbol(c)) { 22 | 23 | if (i != start) { 24 | out.write(ch, start, i - start); 25 | } 26 | start = i + 1; 27 | switch (c) { 28 | case '&': 29 | out.write("&"); 30 | break; 31 | case '<': 32 | out.write("<"); 33 | break; 34 | case '>': 35 | out.write(">"); 36 | break; 37 | case '\n': 38 | case '\r': 39 | out.write("&#"); 40 | out.write(Integer.toString(c)); 41 | out.write(';'); 42 | break; 43 | case '\"': 44 | out.write("""); 45 | break; 46 | default: 47 | out.write(CharacterEscapeBase.REPLACEMENT_CHAR); 48 | break; 49 | } 50 | } 51 | } 52 | 53 | if (start != limit) { 54 | out.write(ch, start, limit - start); 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /nab-web/src/main/java/ru/hh/nab/web/jersey/filter/ErrorAcceptFilter.java: -------------------------------------------------------------------------------- 1 | package ru.hh.nab.web.jersey.filter; 2 | 3 | import 
package ru.hh.nab.web.jersey.filter;

import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerResponseContext;
import jakarta.ws.rs.container.ContainerResponseFilter;
import static jakarta.ws.rs.core.HttpHeaders.CONTENT_TYPE;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.glassfish.jersey.message.internal.AcceptableMediaType;
import org.glassfish.jersey.message.internal.HttpHeaderReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static ru.hh.jclient.common.HttpHeaderNames.X_HH_ACCEPT_ERRORS;

/**
 * Response filter that rewrites the Content-Type of error responses (status >= 400) to the first
 * media type the client requested for errors via the X-HH-Accept-Errors header.
 * Responses without that header, and non-error responses, pass through unchanged.
 */
public class ErrorAcceptFilter implements ContainerResponseFilter {
  private static final Logger LOGGER = LoggerFactory.getLogger(ErrorAcceptFilter.class);

  @Override
  public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException {
    if (responseContext.getStatus() >= 400) {
      String acceptErrors = requestContext.getHeaders().getFirst(X_HH_ACCEPT_ERRORS);
      if (acceptErrors != null) {
        try {
          // Parsed list is ordered by quality factor; the first entry is the client's preferred error media type.
          List<AcceptableMediaType> acceptableMediaTypes = HttpHeaderReader.readAcceptMediaType(acceptErrors);
          if (!acceptableMediaTypes.isEmpty()) {
            responseContext.getHeaders().replace(CONTENT_TYPE, new ArrayList<>(List.of(acceptableMediaTypes.get(0).toString())));
          } else {
            LOGGER.warn("No valid AcceptableMediaType for errors found in {} header: {}", X_HH_ACCEPT_ERRORS, acceptErrors);
          }
        } catch (ParseException e) {
          // A malformed header must not break error delivery — log and keep the original Content-Type.
          LOGGER.warn("Error while parsing {} header.", X_HH_ACCEPT_ERRORS, e);
        }
      }
    }
  }
}
package ru.hh.nab.hibernate.monitoring;

import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import org.hibernate.SessionFactory;
import org.hibernate.stat.Statistics;
import static ru.hh.nab.hibernate.monitoring.HibernateMetrics.QUERY_PLAN_CACHE_HIT_COUNT;
import static ru.hh.nab.hibernate.monitoring.HibernateMetrics.QUERY_PLAN_CACHE_MISS_COUNT;
import ru.hh.nab.metrics.StatsDSender;
import ru.hh.nab.metrics.Tag;
import ru.hh.nab.metrics.TaggedSender;

/**
 * Periodically sends Hibernate query-plan-cache statistics to StatsD, tagged with the service name
 * and the session factory name. Does nothing unless {@code hibernate.generate_statistics=true}.
 */
public class HibernateStatisticsSender {

  private static final String HIBERNATE_GENERATE_STATISTICS_PROPERTY = "hibernate.generate_statistics";
  private static final String SESSION_FACTORY_NAME_TAG = "factory";

  /**
   * @param hibernateProperties hibernate configuration; statistics are sent only if generation is enabled
   * @param serviceName         value of the app tag on every metric
   * @param sessionFactories    session factories to monitor, keyed by the name used in the factory tag
   * @param statsDSender        sender that schedules the periodic reporting
   */
  public HibernateStatisticsSender(
      Properties hibernateProperties,
      String serviceName,
      Map<String, SessionFactory> sessionFactories,
      StatsDSender statsDSender
  ) {
    // Statistics collection is off by default in Hibernate; without it the counters would always be zero.
    if (!Optional.ofNullable(hibernateProperties.getProperty(HIBERNATE_GENERATE_STATISTICS_PROPERTY)).orElse("").equals("true")) {
      return;
    }

    sessionFactories.forEach((sessionFactoryName, sessionFactory) -> {
      var sender = new TaggedSender(
          statsDSender,
          Set.of(new Tag(Tag.APP_TAG_NAME, serviceName), new Tag(SESSION_FACTORY_NAME_TAG, sessionFactoryName))
      );

      statsDSender.sendPeriodically(() -> {
        Statistics statistics = sessionFactory.getStatistics();

        sender.sendCount(QUERY_PLAN_CACHE_HIT_COUNT, statistics.getQueryPlanCacheHitCount());
        sender.sendCount(QUERY_PLAN_CACHE_MISS_COUNT, statistics.getQueryPlanCacheMissCount());

        // Reset after each report so every interval sends deltas, not ever-growing totals.
        statistics.clear();
      });
    });
  }
}
package ru.hh.nab.web.jersey.filter.cache;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;

/**
 * Round-trip tests for {@link Serializer}: a {@link CachedResponse} serialized into a ByteBuffer
 * and deserialized back must be equal to the original.
 */
public class CacheFilterTest {
  private static final Serializer SERIALIZER = new Serializer();

  @Test
  public void testPlaceholderSerializer() {
    CachedResponse response = new CachedResponse();
    byte[] data = response.getSerialized();

    ByteBuffer buffer = ByteBuffer.allocate(SERIALIZER.serializedSize(data));
    SERIALIZER.serialize(data, buffer);

    buffer.rewind();
    CachedResponse result = CachedResponse.from(SERIALIZER.deserialize(buffer));

    // Assert on the DESERIALIZED object, not the original — otherwise the round trip is never verified.
    assertEquals(response.status, result.status);
    assertNull(result.headers);
    assertNull(result.body);
    assertTrue(result.isPlaceholder());
  }

  @Test
  public void testSerializer() {
    CachedResponse response = new CachedResponse(200, new ArrayList<>(), new byte[]{1, 2});
    response.headers.add(new Header("1", "2"));
    // Non-ASCII header value checks that the charset survives the round trip.
    response.headers.add(new Header("X-Header", "что-то"));

    byte[] data = response.getSerialized();

    ByteBuffer buffer = ByteBuffer.allocate(SERIALIZER.serializedSize(data));
    SERIALIZER.serialize(data, buffer);

    buffer.rewind();
    CachedResponse result = CachedResponse.from(SERIALIZER.deserialize(buffer));

    assertEquals(response.status, result.status);
    assertEquals(response.headers, result.headers);
    assertArrayEquals(response.body, result.body);
  }
}