├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE
│ ├── bug-report.md
│ ├── config.yml
│ ├── feature-request.md
│ └── story.md
├── renovate.json5
└── workflows
│ ├── automated_release.yaml
│ ├── on_prerelease.yaml
│ ├── on_push_pr.yaml
│ ├── on_release.yaml
│ ├── repolinter.yml
│ └── security.yaml
├── .gitignore
├── .papers_config.yml
├── .semgrep.yml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── README.md
├── THIRD_PARTY_NOTICES.md
├── build
├── .goreleaser.yml
├── ci.mk
├── nix
│ └── fix_archives.sh
├── package
│ └── windows
│ │ ├── nri-386-installer
│ │ ├── Product.wxs
│ │ └── nri-installer.wixproj
│ │ └── nri-amd64-installer
│ │ ├── Product.wxs
│ │ └── nri-installer.wixproj
├── release.mk
├── upload_artifacts_gh.sh
└── windows
│ ├── download_zip.ps1
│ ├── extract_exe.ps1
│ ├── fix_archives.sh
│ ├── package_msi.ps1
│ ├── set_exe_properties.sh
│ ├── unit_tests.ps1
│ ├── upload_msi.sh
│ └── versioninfo.json.template
├── go.mod
├── go.sum
├── legacy
├── postgresql-definition.yml
└── postgresql-win-definition.yml
├── papers_manifest.yml
├── postgresql-config.yml.k8s_sample
├── postgresql-config.yml.sample
├── postgresql-custom-query.yml.sample
├── postgresql-log.yml.example
├── queries
├── database_queries.pgsql
├── instance_queries.pgsql
└── inventory.pgsql
├── src
├── args
│ ├── argument_list.go
│ └── argument_list_test.go
├── collection
│ ├── collection.go
│ └── collection_test.go
├── connection
│ ├── pgsql_connection.go
│ ├── pgsql_connection_mock.go
│ └── pgsql_connection_test.go
├── fips.go
├── inventory
│ ├── inventory.go
│ └── inventory_test.go
├── main.go
├── metrics
│ ├── database_definitions.go
│ ├── database_definitions_test.go
│ ├── index_definitions.go
│ ├── instance_definitions.go
│ ├── instance_definitions_test.go
│ ├── lock_definitions.go
│ ├── metric_types.go
│ ├── metrics.go
│ ├── metrics_test.go
│ ├── modelers.go
│ ├── pgbouncer_definitions.go
│ ├── table_definitions.go
│ └── version_test.go
└── query-performance-monitoring
│ ├── common-parameters
│ └── common_parameters.go
│ ├── common-utils
│ ├── common_helpers.go
│ ├── common_helpers_test.go
│ ├── constants.go
│ ├── ingestion-helpers.go
│ ├── ingestion_helper_test.go
│ ├── query_fetch_helpers.go
│ └── query_fetch_helpers_test.go
│ ├── datamodels
│ └── performance_data_models.go
│ ├── performance-metrics
│ ├── blocking_sessions.go
│ ├── blocking_sessions_test.go
│ ├── execution_plan_metrics.go
│ ├── execution_plan_metrics_test.go
│ ├── individual_query_metrics.go
│ ├── individual_query_metrics_test.go
│ ├── slow_query_metrics.go
│ ├── slow_query_metrics_test.go
│ ├── wait_event_metrics.go
│ └── wait_event_metrics_test.go
│ ├── queries
│ └── queries.go
│ ├── query_performance_main.go
│ └── validations
│ ├── performance_metrics_validations.go
│ └── performance_metrics_validations_test.go
└── tests
├── README.md
├── docker-compose-performance.yml
├── docker-compose-pgbouncer.yml
├── docker-compose.yml
├── perf-testing
├── integration
│ └── Dockerfile
├── latest_supported
│ ├── 01-init-extensions.sql
│ ├── 02-create-database.sql
│ ├── 03-import-data.sql
│ └── Dockerfile
└── oldest_supported
│ ├── 01-init-extensions.sql
│ ├── 02-create-database.sql
│ ├── 03-import-data.sql
│ └── Dockerfile
├── postgresql_test.go
├── postgresqlperf_test.go
├── simulation
├── helpers.go
└── sim_queries.go
└── testdata
├── blocking-sessions-schema.json
├── execution-plan-schema.json
├── individual-queries-schema.json
├── jsonschema-inventory-latest.json
├── jsonschema-latest.json
├── jsonschema96.json
├── slow-queries-schema.json
└── wait-events-schema.json
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # This is a comment.
2 | # Each line is a file pattern followed by one or more owners.
3 |
4 | # These owners will be the default owners for everything in
5 | # the repo. Unless a later match takes precedence.
6 |
7 | * @newrelic/ohai
8 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug-report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | ^^ Provide a general summary of the issue in the title above. ^^
11 |
12 | ## Description
13 | Describe the problem you're encountering.
14 | TIP: Do NOT share sensitive information, whether personal, proprietary, or otherwise!
15 |
16 | ## Expected Behavior
17 | Tell us what you expected to happen.
18 |
19 | ## [Troubleshooting](https://discuss.newrelic.com/t/troubleshooting-frameworks/108787) or [NR Diag](https://docs.newrelic.com/docs/using-new-relic/cross-product-functions/troubleshooting/new-relic-diagnostics) results
20 | Provide any other relevant log data.
21 | TIP: Scrub logs and diagnostic information for sensitive information
22 |
23 | ## Steps to Reproduce
24 | Please be as specific as possible.
25 | TIP: Link a sample application that demonstrates the issue.
26 |
27 | ## Your Environment
28 | Include as many relevant details about your environment as possible including the running version of New Relic software and any relevant configurations.
29 |
30 | ## Additional context
31 | Add any other context about the problem here. For example, relevant community posts or support tickets.
32 |
33 | ## For Maintainers Only or Hero Triaging this bug
34 | *Suggested Priority (P1,P2,P3,P4,P5):*
35 | *Suggested T-Shirt size (S, M, L, XL, Unknown):*
36 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Troubleshooting
4 | url: https://github.com/newrelic/nri-postgresql/blob/master/README.md#support
5 | about: Check out the README for troubleshooting directions
6 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature-request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: 'feature request'
6 | assignees: ''
7 | priority: ''
8 | ---
9 | ### Description
10 | _A clear and concise description of the feature you want or need_
11 |
12 | ### Acceptance Criteria
13 | _What tasks need to be accomplished to achieve the goal?_
14 |
15 | ### Describe Alternatives
16 | _A clear and concise description of any alternative solutions or features you've considered_
17 | _Are there examples you could link us to?_
18 |
19 | ### Dependencies
20 | _Do any other teams or parts of the New Relic product need to be considered?_
21 | _Some common areas: UI, collector, documentation_
22 |
23 | ### Additional context
24 | _What else should we know about this story that might not fit into the other categories?_
25 |
26 | ### Estimates
27 | _Please provide initial t-shirt size. S = 1-3 days, M = 3-5 days (1 week), L = 1-2 weeks (1 sprint)_
28 |
29 | ## For Maintainers Only or Hero Triaging this bug
30 | *Suggested Priority (P1,P2,P3,P4,P5):*
31 | *Suggested T-Shirt size (S, M, L, XL, Unknown):*
32 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/story.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Story
3 | about: Issue describing development work to fulfill a feature request
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 | priority: ''
8 | ---
9 | ### Description
10 | _What's the goal of this unit of work? What is included? What isn't included?_
11 |
12 | ### Acceptance Criteria
13 | _What tasks need to be accomplished to achieve the goal?_
14 |
15 | ### Design Consideration/Limitations
16 | _Why is this the route we should take to achieve our goal?_
17 | _What can't be achieved within this story?_
18 |
19 | ### Dependencies
20 | _Do any other teams or parts of the New Relic product need to be considered?_
21 | _Some common areas: UI, collector, documentation_
22 |
23 | ### Additional context
24 | _What else should we know about this story that might not fit into the other categories?_
25 |
26 | ### Estimates
27 | _Please provide initial t-shirt size. S = 1-3 days, M = 3-5 days (1 week), L = 1-2 weeks (1 sprint)_
28 |
--------------------------------------------------------------------------------
/.github/renovate.json5:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "github>newrelic/coreint-automation:renovate-base.json5"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/.github/workflows/automated_release.yaml:
--------------------------------------------------------------------------------
1 | name: Automated release creation
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: "0 15 * * 1"
7 |
8 | jobs:
9 | release_management:
10 | uses: newrelic/coreint-automation/.github/workflows/reusable_release_automation.yaml@v3
11 | secrets: inherit
12 |
--------------------------------------------------------------------------------
/.github/workflows/on_prerelease.yaml:
--------------------------------------------------------------------------------
1 | name: Prerelease pipeline
2 |
3 | on:
4 | release:
5 | types:
6 | - prereleased
7 | tags:
8 | - 'v*'
9 |
10 | jobs:
11 | pre-release:
12 | uses: newrelic/coreint-automation/.github/workflows/reusable_pre_release.yaml@v3
13 | with:
14 | tag: ${{ github.event.release.tag_name }}
15 | integration: "postgresql"
16 | upload_fips_packages: true
17 | secrets: inherit
18 |
--------------------------------------------------------------------------------
/.github/workflows/on_push_pr.yaml:
--------------------------------------------------------------------------------
1 | name: Push/PR
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - master
8 | - renovate/**
9 | pull_request:
10 | workflow_dispatch:
11 |
12 | jobs:
13 | push-pr:
14 | uses: newrelic/coreint-automation/.github/workflows/reusable_push_pr.yaml@v3
15 | with:
16 | integration: postgresql
17 | run_test_build_fake_prerelease: true
18 | secrets: inherit
19 |
--------------------------------------------------------------------------------
/.github/workflows/on_release.yaml:
--------------------------------------------------------------------------------
1 | name: Create release artifacts
2 |
3 | on:
4 | release:
5 | types:
6 | - released
7 | tags:
8 | - 'v*'
9 |
10 | jobs:
11 | release:
12 | uses: newrelic/coreint-automation/.github/workflows/reusable_on_release.yaml@v3
13 | with:
14 | integration: postgresql
15 | tag: ${{ github.event.release.tag_name }}
16 | upload_fips_packages: true
17 | secrets: inherit
18 |
--------------------------------------------------------------------------------
/.github/workflows/repolinter.yml:
--------------------------------------------------------------------------------
1 | # NOTE: This file should always be named `repolinter.yml` to allow
2 | # workflow_dispatch to work properly
3 | name: Repolinter Action
4 |
5 | on:
6 | push:
7 | workflow_dispatch:
8 |
9 | jobs:
10 | repolinter:
11 | uses: newrelic/coreint-automation/.github/workflows/reusable_repolinter.yaml@v3
12 |
--------------------------------------------------------------------------------
/.github/workflows/security.yaml:
--------------------------------------------------------------------------------
1 | name: Security Scan
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - main
8 | - renovate/**
9 | pull_request:
10 | schedule:
11 | - cron: "0 3 * * *"
12 |
13 | jobs:
14 | security:
15 | uses: newrelic/coreint-automation/.github/workflows/reusable_security.yaml@v3
16 | with:
17 | skip-dirs: "build"
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled Object files, Static and Dynamic libs (Shared Objects)
2 | *.o
3 | *.a
4 | *.so
5 |
6 | # Folders
7 | _obj
8 | _test
9 | bin
10 |
11 | # Architecture specific extensions/prefixes
12 | *.[568vq]
13 | [568vq].out
14 |
15 | *.cgo1.go
16 | *.cgo2.c
17 | _cgo_defun.c
18 | _cgo_gotypes.go
19 | _cgo_export.*
20 |
21 | _testmain.go
22 |
23 | *.exe
24 | *.test
25 | *.prof
26 | *.DS_STORE
27 |
28 | /integrations/*/bin/
29 | coverage.xml
30 | target/
31 | # Vim swap
32 | [._]*.s[a-w][a-z]
33 | [._]s[a-w][a-z]
34 |
35 | # Vim session
36 | Session.vim
37 |
38 | # VSCode
39 | .vscode
40 | debug
41 | server.key
42 | server.crt
43 | root.crt
44 | postgresql-config.yml
45 | Makefile-deploy.mk
46 |
47 | bin/
48 | dist/
49 | vendor
50 |
51 | # Local development tools/files
52 | .idea
53 | .env
54 | .envrc
55 | *.gpg
56 |
57 | # build files
58 | cmd/nri-postgresql/versioninfo.json
59 | cmd/nri-postgresql/resource.syso
60 |
61 | # Release toolkit
62 | CHANGELOG.partial.md
63 |
--------------------------------------------------------------------------------
/.papers_config.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | manifest_file: papers_manifest.yml
4 | accept:
5 | - Apache-2.0 # Use of Apache conditioned upon no contribution of modified code back to original author/open source community.
6 | - MIT
7 | - ISC
8 | - BSD-2-Clause-FreeBSD
9 | - BSD-2-Clause-NetBSD
10 | - BSD-2-Clause
11 | - BSD-3-Clause-Attribution
12 | - BSD-3-Clause-Clear
13 | - BSD-3-Clause-LBNL
14 | - BSD-3-Clause
15 | - BSD-4-Clause-UC
16 | - BSD-4-Clause
17 | - BSD-Protection
18 | - MS-PL
19 | - Ruby # Use of Ruby conditioned upon selection of the BSDL option for any internal/non-source-available modifications.
20 | - ISC
21 | - CC0-1.0
22 |
23 | reject:
24 | - AGPL-1.0
25 | - AGPL-3.0
26 | - GPL-2.0-with-GCC-exception # Certain GPL licenses have a Classpath exception that we have accepted in limited circumstances because it enables us to put it under our closed, proprietary license.
27 | - GPL-2.0-with-autoconf-exception
28 | - GPL-2.0-with-bison-exception
29 | - GPL-2.0-with-classpath-exception
30 | - GPL-2.0-with-font-exception
31 | - GPL-2.0
32 | - GPL-3.0-with-GCC-exception
33 | - GPL-3.0-with-autoconf-exception
34 | - GPL-3.0
35 | - LGPL-2.0
36 | - LGPL-2.1
37 | - LGPL-3.0
38 | - Artistic-1.0-Perl
39 | - Artistic-1.0-cl8
40 | - Artistic-1.0
41 | - Artistic-2.0
42 | - MPL-2.0
43 | - CDDL-1.0
44 | - CDDL-1.1
45 | - EPL-1.0
46 |
47 | exceptions:
48 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/args
49 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/data/event
50 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/data/inventory
51 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/data/metric
52 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/integration
53 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/log
54 | - github.com/newrelic/nri-postgresql/vendor/github.com/newrelic/infra-integrations-sdk/persist
55 |
56 |
--------------------------------------------------------------------------------
/.semgrep.yml:
--------------------------------------------------------------------------------
1 | rules:
2 | - id: string-formatted-query
3 | languages: [go]
4 | message: |
5 | String-formatted SQL query detected. This could lead to SQL injection if
6 | the string is not sanitized properly. Audit this call to ensure the
7 | SQL is not manipulatable by external data.
8 | severity: WARNING
9 | metadata:
10 | owasp: 'A1: Injection'
11 | cwe: "CWE-89: Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')"
12 | source-rule-url: https://github.com/securego/gosec
13 | patterns:
14 | - pattern-not-inside: |
15 | $VAR = "..." + "..."
16 | ...
17 | $OBJ.$SINK(..., $VAR, ...)
18 | - pattern-either:
19 | - pattern: $OBJ.Exec("..." + $X)
20 | - pattern: $OBJ.ExecContext($CTX, "..." + $X)
21 | - pattern: $OBJ.Query("..." + $X)
22 | - pattern: $OBJ.QueryContext($CTX, "..." + $X)
23 | - pattern: $OBJ.QueryRow("..." + $X)
24 | - pattern: $OBJ.QueryRow($CTX, "..." + $X)
25 | - pattern: $OBJ.QueryRowContext($CTX, "..." + $X)
26 | - pattern: $OBJ.Exec(fmt.$P("...", ...))
27 | - pattern: $OBJ.ExecContext($CTX, fmt.$P("...", ...))
28 | - pattern: $OBJ.Query(fmt.$P("...", ...))
29 | - pattern: $OBJ.QueryContext($CTX, fmt.$P("...", ...))
30 | - pattern: $OBJ.QueryRow(fmt.$P("...", ...))
31 | - pattern: $OBJ.QueryRow($CTX, fmt.$P("...", ...))
32 | - pattern: $OBJ.QueryRowContext($CTX, fmt.$P("...", ...))
33 | - pattern: |
34 | $QUERY = "..."
35 | ...
36 | $QUERY = $FXN(..., $QUERY, ...)
37 | ...
38 | $OBJ.Exec($QUERY, ...)
39 | - pattern: |
40 | $QUERY = "..."
41 | ...
42 | $QUERY = $FXN(..., $QUERY, ...)
43 | ...
44 | $OBJ.Query($QUERY, ...)
45 | - pattern: |
46 | $QUERY = "..."
47 | ...
48 | $QUERY = $FXN(..., $QUERY, ...)
49 | ...
50 | $OBJ.ExecContext($CTX, $QUERY, ...)
51 | - pattern: |
52 | $QUERY = "..."
53 | ...
54 | $QUERY = $FXN(..., $QUERY, ...)
55 | ...
56 | $OBJ.QueryContext($CTX, $QUERY, ...)
57 | - pattern: |
58 | $QUERY = "..."
59 | ...
60 | $QUERY = $FXN(..., $QUERY, ...)
61 | ...
62 | $OBJ.QueryRow($QUERY)
63 | - pattern: |
64 | $QUERY = "..."
65 | ...
66 | $QUERY = $FXN(..., $QUERY, ...)
67 | ...
68 | $OBJ.QueryRow($CTX, $QUERY)
69 | - pattern: |
70 | $QUERY = "..."
71 | ...
72 | $QUERY = $FXN(..., $QUERY, ...)
73 | ...
74 | $OBJ.QueryRowContext($CTX, $QUERY, ...)
75 | - pattern: |
76 | $QUERY = "..."
77 | ...
78 | $OTHER = $FXN(..., $QUERY, ...)
79 | ...
80 | $OBJ.Exec($OTHER, ...)
81 | - pattern: |
82 | $QUERY = "..."
83 | ...
84 | $OTHER = $FXN(..., $QUERY, ...)
85 | ...
86 | $OBJ.Query($OTHER, ...)
87 | - pattern: |
88 | $QUERY = "..."
89 | ...
90 | $OTHER = $FXN(..., $QUERY, ...)
91 | ...
92 | $OBJ.ExecContext($CTX, $OTHER, ...)
93 | - pattern: |
94 | $QUERY = "..."
95 | ...
96 | $OTHER = $FXN(..., $QUERY, ...)
97 | ...
98 | $OBJ.QueryContext($CTX, $OTHER, ...)
99 | - pattern: |
100 | $QUERY = "..."
101 | ...
102 | $OTHER = $FXN(..., $QUERY, ...)
103 | ...
104 | $OBJ.QueryRow($OTHER)
105 | - pattern: |
106 | $QUERY = "..."
107 | ...
108 | $OTHER = $FXN(..., $QUERY, ...)
109 | ...
110 | $OBJ.QueryRow($CTX, $OTHER)
111 | - pattern: |
112 | $QUERY = "..."
113 | ...
114 | $OTHER = $FXN(..., $QUERY, ...)
115 | ...
116 | $OBJ.QueryRowContext($CTX, $OTHER, ...)
117 | - pattern: |
118 | $QUERY = "..." + $X
119 | ...
120 | $OBJ.Exec($QUERY, ...)
121 | - pattern: |
122 | $QUERY = "..." + $X
123 | ...
124 | $OBJ.Query($QUERY, ...)
125 | - pattern: |
126 | $QUERY = "..." + $X
127 | ...
128 | $OBJ.ExecContext($CTX, $QUERY, ...)
129 | - pattern: |
130 | $QUERY = "..." + $X
131 | ...
132 | $OBJ.QueryContext($CTX, $QUERY, ...)
133 | - pattern: |
134 | $QUERY = "..." + $X
135 | ...
136 | $OBJ.QueryRow($QUERY)
137 | - pattern: |
138 | $QUERY = "..." + $X
139 | ...
140 | $OBJ.QueryRow($CTX, $QUERY)
141 | - pattern: |
142 | $QUERY = "..." + $X
143 | ...
144 | $OBJ.QueryRowContext($CTX, $QUERY, ...)
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Contributions are always welcome. Before contributing please read the
4 | [code of conduct](./CODE_OF_CONDUCT.md) and [search the issue tracker](../../issues); your issue may have already been discussed or fixed in `main`. To contribute,
5 | [fork](https://help.github.com/articles/fork-a-repo/) this repository, commit your changes, and [send a Pull Request](https://help.github.com/articles/using-pull-requests/).
6 |
7 | Note that our [code of conduct](./CODE_OF_CONDUCT.md) applies to all platforms and venues related to this project; please follow it in all your interactions with the project and its participants.
8 |
9 | ## Feature Requests
10 |
11 | Feature requests should be submitted in the [Issue tracker](../../issues), with a description of the expected behavior & use case, where they’ll remain closed until sufficient interest, [e.g. :+1: reactions](https://help.github.com/articles/about-discussions-in-issues-and-pull-requests/), has been [shown by the community](../../issues?q=label%3A%22votes+needed%22+sort%3Areactions-%2B1-desc).
12 | Before submitting an Issue, please search for similar ones in the
13 | [closed issues](../../issues?q=is%3Aissue+is%3Aclosed+label%3Aenhancement).
14 |
15 | ## Pull Requests
16 |
17 | 1. Ensure any install or build dependencies are removed before the end of the layer when doing a build.
18 | 2. Increase the version numbers in any examples files and the README.md to the new version that this Pull Request would represent. The versioning scheme we use is [SemVer](http://semver.org/).
19 | 3. You may merge the Pull Request in once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you.
20 |
21 | ## Contributor License Agreement
22 |
23 | Keep in mind that when you submit your Pull Request, you'll need to sign the CLA via the click-through using CLA-Assistant. If you'd like to execute our corporate CLA, or if you have any questions, please drop us an email at opensource@newrelic.com.
24 |
25 | For more information about CLAs, please check out Alex Russell’s excellent post,
26 | [“Why Do I Need to Sign This?”](https://infrequently.org/2008/06/why-do-i-need-to-sign-this/).
27 |
28 | ## Slack
29 |
30 | We host a public Slack with a dedicated channel for contributors and maintainers of open source projects hosted by New Relic. If you are contributing to this project, you're welcome to request access to the #oss-contributors channel in the newrelicusers.slack.com workspace. To request access, please use this [link](https://join.slack.com/t/newrelicusers/shared_invite/zt-1ayj69rzm-~go~Eo1whIQGYnu3qi15ng).
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2018, Blue Medora Inc.
2 |
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
6 | Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
7 |
8 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
9 |
10 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
11 | WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
12 | OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
13 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
14 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | export PATH := $(PATH):$(GOPATH)/bin
2 | NATIVEOS := $(shell go version | awk -F '[ /]' '{print $$4}')
3 | NATIVEARCH := $(shell go version | awk -F '[ /]' '{print $$5}')
4 | INTEGRATION := postgresql
5 | GOFLAGS = -mod=readonly # ignore the vendor directory and to report an error if go.mod needs to be updated.
6 | BINARY_NAME = nri-$(INTEGRATION)
7 | INTEGRATIONS_DIR = /var/db/newrelic-infra/newrelic-integrations/
8 | CONFIG_DIR = /etc/newrelic-infra/integrations.d
9 | GO_FILES := ./src/
10 | GO_VERSION ?= $(shell grep '^go ' go.mod | awk '{print $$2}')
11 | BUILDER_IMAGE ?= "ghcr.io/newrelic/coreint-automation:latest-go$(GO_VERSION)-ubuntu16.04"
12 |
13 | all: build
14 |
15 | build: clean test compile
16 |
17 | build-container:
18 | docker build -t nri-postgresql .
19 |
20 | clean:
21 | @echo "=== $(INTEGRATION) === [ clean ]: Removing binaries and coverage file..."
22 | @rm -rfv bin coverage.xml
23 |
24 | compile:
25 | @echo "=== $(INTEGRATION) === [ compile ]: Building $(BINARY_NAME)..."
26 | @go build -o bin/$(BINARY_NAME) $(GO_FILES)
27 |
28 | test:
29 | @echo "=== $(INTEGRATION) === [ test ]: running unit tests..."
30 | @go test -race ./... -count=1
31 |
32 |
33 | integration-test:
34 | @echo "=== $(INTEGRATION) === [ test ]: running integration tests..."
35 | @docker compose -f tests/docker-compose.yml up -d
36 | # Sleep added to allow postgres with test data and extensions to start up
37 | @sleep 10
38 | @go test -v -tags=integration -count 1 ./tests/postgresql_test.go -timeout 300s || (ret=$$?; docker compose -f tests/docker-compose.yml down -v && exit $$ret)
39 | @docker compose -f tests/docker-compose.yml down -v
40 | @echo "=== $(INTEGRATION) === [ test ]: running integration tests for query performance monitoring..."
41 | @echo "Starting containers for performance tests..."
42 | @docker compose -f tests/docker-compose-performance.yml up -d
43 | # Sleep added to allow postgres with test data and extensions to start up
44 | @sleep 30
45 | @go test -v -tags=query_performance ./tests/postgresqlperf_test.go -timeout 600s || (ret=$$?; docker compose -f tests/docker-compose-performance.yml down -v && exit $$ret)
46 | @echo "Stopping performance test containers..."
47 | @docker compose -f tests/docker-compose-performance.yml down -v
48 |
49 | install: compile
50 | @echo "=== $(INTEGRATION) === [ install ]: installing bin/$(BINARY_NAME)..."
51 | @sudo install -D --mode=755 --owner=root --strip $(ROOT)bin/$(BINARY_NAME) $(INTEGRATIONS_DIR)/bin/$(BINARY_NAME)
52 | @sudo install -D --mode=644 --owner=root $(ROOT)$(INTEGRATION)-config.yml.sample $(CONFIG_DIR)/$(INTEGRATION)-config.yml.sample
53 |
54 | # rt-update-changelog runs the release-toolkit run.sh script by piping it into bash to update the CHANGELOG.md.
55 | # It also passes down to the script all the flags added to the make target. To check all the accepted flags,
56 | # see: https://github.com/newrelic/release-toolkit/blob/main/contrib/ohi-release-notes/run.sh
57 | # e.g. `make rt-update-changelog -- -v`
58 | rt-update-changelog:
59 | curl "https://raw.githubusercontent.com/newrelic/release-toolkit/v1/contrib/ohi-release-notes/run.sh" | bash -s -- $(filter-out $@,$(MAKECMDGOALS))
60 |
61 | # Include thematic Makefiles
62 | include $(CURDIR)/build/ci.mk
63 | include $(CURDIR)/build/release.mk
64 |
65 | .PHONY: all build clean compile test integration-test install rt-update-changelog
66 |
--------------------------------------------------------------------------------
/build/ci.mk:
--------------------------------------------------------------------------------
1 | .PHONY : ci/pull-builder-image
2 | ci/pull-builder-image:
3 | @docker pull $(BUILDER_IMAGE)
4 |
5 | .PHONY : ci/deps
6 | ci/deps: ci/pull-builder-image
7 |
8 | .PHONY : ci/debug-container
9 | ci/debug-container: ci/deps
10 | @docker run --rm -it \
11 | --name "nri-$(INTEGRATION)-debug" \
12 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
13 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
14 | -e PRERELEASE=true \
15 | -e GITHUB_TOKEN \
16 | -e REPO_FULL_NAME \
17 | -e TAG \
18 | -e GPG_MAIL \
19 | -e GPG_PASSPHRASE \
20 | -e GPG_PRIVATE_KEY_BASE64 \
21 | $(BUILDER_IMAGE) bash
22 |
23 | .PHONY : ci/test
24 | ci/test: ci/deps
25 | @docker run --rm -t \
26 | --name "nri-$(INTEGRATION)-test" \
27 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
28 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
29 | $(BUILDER_IMAGE) make test
30 |
31 | .PHONY : ci/snyk-test
32 | ci/snyk-test:
33 | @docker run --rm -t \
34 | --name "nri-$(INTEGRATION)-snyk-test" \
35 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
36 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
37 | -e SNYK_TOKEN \
38 | -e GO111MODULE=auto \
39 | snyk/snyk:golang snyk test --severity-threshold=high
40 |
41 | .PHONY : ci/build
42 | ci/build: ci/deps
43 | ifdef TAG
44 | @docker run --rm -t \
45 | --name "nri-$(INTEGRATION)-build" \
46 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
47 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
48 | -e INTEGRATION \
49 | -e TAG \
50 | $(BUILDER_IMAGE) make release/build
51 | else
52 | @echo "===> $(INTEGRATION) === [ci/build] TAG env variable expected to be set"
53 | exit 1
54 | endif
55 |
56 | .PHONY : ci/prerelease
57 | ci/prerelease: ci/deps
58 | ifdef TAG
59 | @docker run --rm -t \
60 | --name "nri-$(INTEGRATION)-prerelease" \
61 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
62 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
63 | -e INTEGRATION \
64 | -e PRERELEASE=true \
65 | -e GITHUB_TOKEN \
66 | -e REPO_FULL_NAME \
67 | -e TAG \
68 | -e GPG_MAIL \
69 | -e GPG_PASSPHRASE \
70 | -e GPG_PRIVATE_KEY_BASE64 \
71 | $(BUILDER_IMAGE) make release
72 | else
73 | @echo "===> $(INTEGRATION) === [ci/prerelease] TAG env variable expected to be set"
74 | exit 1
75 | endif
76 |
77 | .PHONY : ci/fake-prerelease
78 | ci/fake-prerelease: ci/deps
79 | ifdef TAG
80 | @docker run --rm -t \
81 | --name "nri-$(INTEGRATION)-prerelease" \
82 | -v $(CURDIR):/go/src/github.com/newrelic/nri-$(INTEGRATION) \
83 | -w /go/src/github.com/newrelic/nri-$(INTEGRATION) \
84 | -e INTEGRATION \
85 | -e PRERELEASE=true \
86 | -e NO_PUBLISH=true \
87 | -e NO_SIGN \
88 | -e GITHUB_TOKEN \
89 | -e REPO_FULL_NAME \
90 | -e TAG \
91 | -e GPG_MAIL \
92 | -e GPG_PASSPHRASE \
93 | -e GPG_PRIVATE_KEY_BASE64 \
94 | $(BUILDER_IMAGE) make release
95 | else
96 | @echo "===> $(INTEGRATION) === [ci/fake-prerelease] TAG env variable expected to be set"
97 | exit 1
98 | endif
99 |
--------------------------------------------------------------------------------
/build/nix/fix_archives.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e
#
# Gets dist/*_dirty.tar.gz tarballs created by Goreleaser (all files in the
# archive root) and reorganizes the files into the directory layout expected
# by the New Relic infra agent, producing one clean tarball per dirty one.
#
# $1:  absolute path to the project root (used to return after cd-ing into
#      the temporary content directory).
# Env: INTEGRATION - integration name (e.g. "postgresql").
#
PROJECT_PATH=$1

# IFS= and -r keep filenames with spaces/backslashes intact; all expansions
# below are quoted for the same reason.
find dist -regex ".*_dirty\.tar\.gz" | while IFS= read -r tarball_dirty; do
  echo "tarball_dirty: ${tarball_dirty}"
  tarball=${tarball_dirty/_dirty.tar.gz} # strip trailing _dirty.tar.gz
  tarball=${tarball/dist\/}              # strip leading folder name
  echo "tarball: ${tarball}"
  TARBALL_CLEAN="${tarball}.tar.gz"
  echo "TARBALL_CLEAN: ${TARBALL_CLEAN}"
  TARBALL_TMP="dist/tarball_temp"
  TARBALL_CONTENT_PATH="${TARBALL_TMP}/${tarball}_content"
  mkdir -p "${TARBALL_CONTENT_PATH}/var/db/newrelic-infra/newrelic-integrations/bin/"
  mkdir -p "${TARBALL_CONTENT_PATH}/etc/newrelic-infra/integrations.d/"
  echo "===> Decompress ${tarball} in ${TARBALL_CONTENT_PATH}"
  tar -xvf "${tarball_dirty}" -C "${TARBALL_CONTENT_PATH}"

  echo "===> Move files inside ${tarball}"
  mv "${TARBALL_CONTENT_PATH}/nri-${INTEGRATION}" "${TARBALL_CONTENT_PATH}/var/db/newrelic-infra/newrelic-integrations/bin/"
  mv "${TARBALL_CONTENT_PATH}/${INTEGRATION}-definition.yml" "${TARBALL_CONTENT_PATH}/var/db/newrelic-infra/newrelic-integrations/"
  mv "${TARBALL_CONTENT_PATH}/${INTEGRATION}-config.yml.sample" "${TARBALL_CONTENT_PATH}/etc/newrelic-infra/integrations.d/"

  echo "===> Creating tarball ${TARBALL_CLEAN}"
  cd "${TARBALL_CONTENT_PATH}"
  tar -czvf "../${TARBALL_CLEAN}" .
  cd "${PROJECT_PATH}"
  echo "===> Moving tarball ${TARBALL_CLEAN}"
  mv "${TARBALL_TMP}/${TARBALL_CLEAN}" dist/
  echo "===> Cleaning dirty tarball ${tarball_dirty}"
  rm "${tarball_dirty}"
done
--------------------------------------------------------------------------------
/build/package/windows/nri-386-installer/Product.wxs:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
13 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
74 |
75 |
76 |
79 |
80 |
81 |
82 |
83 |
87 |
88 |
89 |
90 |
91 |
95 |
96 |
97 |
98 |
--------------------------------------------------------------------------------
/build/package/windows/nri-386-installer/nri-installer.wixproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Release
5 | x86
6 | 3.10
7 | 9fb130c1-e39d-428b-a1fa-c793c1975770
8 | 2.0
9 | nri-$(IntegrationName)-386
10 | Package
11 | C:\Program Files (x86)\Microsoft SDKs\ClickOnce\SignTool\
12 | $(MSBuildExtensionsPath32)\Microsoft\WiX\v3.x\Wix.targets
13 | $(MSBuildExtensionsPath)\Microsoft\WiX\v3.x\Wix.targets
14 | newrelic-nri-$(IntegrationName)-installer
15 | false
16 |
17 |
18 | bin\$(Configuration)\
19 | obj\$(Configuration)\
20 | Debug;ProjectRootPath=..\..\..\..\;BinariesPath=..\..\..\..\dist\nri-$(IntegrationName)_windows_386\
21 | True
22 | False
23 | -arch x86
24 |
25 |
26 | bin\$(Configuration)\
27 | obj\$(Configuration)\
28 | Debug;ProjectRootPath=..\..\..\..\;BinariesPath=..\..\..\..\dist\nri-$(IntegrationName)_windows_386\
29 | -arch x86
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/build/package/windows/nri-amd64-installer/Product.wxs:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
13 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
74 |
75 |
76 |
79 |
80 |
81 |
82 |
83 |
87 |
88 |
89 |
90 |
91 |
95 |
96 |
97 |
98 |
99 |
--------------------------------------------------------------------------------
/build/package/windows/nri-amd64-installer/nri-installer.wixproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Release
5 | x86
6 | 3.10
7 | 9fb130c1-e39d-428b-a1fa-c793c1975770
8 | 2.0
9 | nri-$(IntegrationName)-amd64
10 | Package
11 | C:\Program Files (x86)\Microsoft SDKs\ClickOnce\SignTool\
12 | $(MSBuildExtensionsPath32)\Microsoft\WiX\v3.x\Wix.targets
13 | $(MSBuildExtensionsPath)\Microsoft\WiX\v3.x\Wix.targets
14 | newrelic-nri-$(IntegrationName)-installer
15 | false
16 |
17 |
18 | bin\$(Configuration)\
19 | obj\$(Configuration)\
20 | Debug;ProjectRootPath=..\..\..\..\;BinariesPath=..\..\..\..\dist\nri-$(IntegrationName)_windows_amd64\
21 | True
22 | False
23 | -arch x64
24 |
25 |
26 | bin\$(Configuration)\
27 | obj\$(Configuration)\
28 | Debug;ProjectRootPath=..\..\..\..\;BinariesPath=..\..\..\..\dist\nri-$(IntegrationName)_windows_amd64\
29 | -arch x64
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/build/release.mk:
--------------------------------------------------------------------------------
# Release tooling: installs a pinned goreleaser, builds/signs/publishes the
# integration's release artifacts, and fixes archive internal layouts.
BUILD_DIR := ./bin/
GORELEASER_VERSION := v2.4.4
GORELEASER_BIN ?= bin/goreleaser

# Ensure the local bin directory exists for downloaded tooling.
bin:
	@mkdir -p $(BUILD_DIR)

# Download the pinned goreleaser release tarball and unpack it into bin/.
# OS_DOWNLOAD is resolved from the platform detection at the bottom of this
# file (Make variables in recipes are expanded lazily, so this is safe).
$(GORELEASER_BIN): bin
	@echo "===> $(INTEGRATION) === [$(GORELEASER_BIN)] Installing goreleaser $(GORELEASER_VERSION)"
	@(wget -qO /tmp/goreleaser.tar.gz https://github.com/goreleaser/goreleaser/releases/download/$(GORELEASER_VERSION)/goreleaser_$(OS_DOWNLOAD)_x86_64.tar.gz)
	@(tar -xf /tmp/goreleaser.tar.gz -C bin/)
	@(rm -f /tmp/goreleaser.tar.gz)
	@echo "===> $(INTEGRATION) === [$(GORELEASER_BIN)] goreleaser downloaded"

# Remove Windows build metadata generated by set_exe_properties.sh /
# goversioninfo so it does not leak into subsequent builds.
.PHONY : release/clean
release/clean:
	@echo "===> $(INTEGRATION) === [release/clean] remove build metadata files"
	rm -fv $(CURDIR)/src/versioninfo.json
	rm -fv $(CURDIR)/src/resource.syso

# Install build dependencies: goreleaser plus goversioninfo (pinned to a
# specific commit for reproducibility).
.PHONY : release/deps
release/deps: $(GORELEASER_BIN)
	@echo "===> $(INTEGRATION) === [release/deps] install goversioninfo"
	@go install github.com/josephspurrier/goversioninfo/cmd/goversioninfo@233067e5ebdfc62d994b1446a607b40ced91907b

# Compile all binaries. With PRERELEASE=true, run goreleaser's full release
# pipeline (packages + archives); otherwise only a snapshot build.
.PHONY : release/build
release/build: release/deps release/clean
ifeq ($(PRERELEASE), true)
	@echo "===> $(INTEGRATION) === [release/build] PRE-RELEASE compiling all binaries, creating packages, archives"
	@$(GORELEASER_BIN) release --config $(CURDIR)/build/.goreleaser.yml --clean
else
	@echo "===> $(INTEGRATION) === [release/build] build compiling all binaries"
	@$(GORELEASER_BIN) build --config $(CURDIR)/build/.goreleaser.yml --snapshot --clean
endif

# Restructure goreleaser's "dirty" tar.gz/zip archives into the directory
# layout expected by the infra agent (see build/nix and build/windows).
.PHONY : release/fix-archive
release/fix-archive:
	@echo "===> $(INTEGRATION) === [release/fix-archive] fixing tar.gz archives internal structure"
	@bash $(CURDIR)/build/nix/fix_archives.sh $(CURDIR)
	@echo "===> $(INTEGRATION) === [release/fix-archive] fixing zip archives internal structure"
	@bash $(CURDIR)/build/windows/fix_archives.sh $(CURDIR)

# Sign packages unless NO_SIGN=true.
.PHONY : release/sign/nix
release/sign/nix:
ifneq ($(NO_SIGN), true)
	@echo "===> $(INTEGRATION) === [release/sign] signing packages"
	@bash sign.sh
else
	@echo "===> $(INTEGRATION) === [release/sign] signing packages is disabled by environment variable"
endif

# Upload dist artifacts to the GitHub release unless NO_PUBLISH=true.
.PHONY : release/publish
release/publish:
ifneq ($(NO_PUBLISH), true)
	@echo "===> $(INTEGRATION) === [release/publish] publishing artifacts"
	@bash $(CURDIR)/build/upload_artifacts_gh.sh
else
	@echo "===> $(INTEGRATION) === [release/publish] publish is disabled by environment variable"
endif

# Full nix release cycle: build, fix archives, sign, publish, then clean up.
.PHONY : release
release: release/build release/fix-archive release/sign/nix release/publish release/clean
	@echo "===> $(INTEGRATION) === [release/publish] full pre-release cycle complete for nix"

# Platform detection: choose the goreleaser download flavor (and GNU tar on
# macOS, where the default tar is BSD tar).
OS := $(shell uname -s)
ifeq ($(OS), Darwin)
OS_DOWNLOAD := "darwin"
TAR := gtar
else
OS_DOWNLOAD := "linux"
endif
72 |
--------------------------------------------------------------------------------
/build/upload_artifacts_gh.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e
#
# Upload dist artifacts (msi/rpm/deb/zip/tar.gz) to GH Release assets.
#
# Env: TAG - the release tag whose assets are being uploaded.
#
cd dist
# Fix: the log line previously ran a bogus command substitution instead of
# printing the filename; also escape the dot in "tar.gz" and quote the
# expansions so filenames with spaces survive.
find . -regex ".*\.\(msi\|rpm\|deb\|zip\|tar\.gz\)" | while IFS= read -r filename; do
    echo "===> Uploading to GH ${TAG}: ${filename}"
    gh release upload "${TAG}" "${filename}"
done
--------------------------------------------------------------------------------
/build/windows/download_zip.ps1:
--------------------------------------------------------------------------------
# Downloads the released integration .zip asset from GitHub into .\dist.
param (
    [string]$INTEGRATION="none",
    [string]$ARCH="amd64",
    [string]$TAG="v0.0.0",
    [string]$REPO_FULL_NAME="none"
)

write-host "===> Creating dist folder"
New-Item -ItemType directory -Path .\dist -Force

# Strip the leading "v" from the tag to obtain the bare version number,
# which is embedded in the released zip's filename.
$version = $TAG.Substring(1)
$zip_name = "nri-${INTEGRATION}-${ARCH}.${version}.zip"
$zip_url = "https://github.com/${REPO_FULL_NAME}/releases/download/${TAG}/${zip_name}"

write-host "===> Downloading & extracting .exe from ${zip_url}"
# GitHub requires TLS 1.2; older PowerShell sessions may not enable it by default.
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
Invoke-WebRequest -Uri $zip_url -OutFile ".\dist\${zip_name}"
--------------------------------------------------------------------------------
/build/windows/extract_exe.ps1:
--------------------------------------------------------------------------------
# Expands the downloaded integration .zip from dist\ into a per-architecture folder.
param (
    [string]$INTEGRATION="none",
    [string]$ARCH="amd64",
    [string]$TAG="v0.0.0"
)

write-host "===> Creating dist folder"
New-Item -ItemType directory -Path .\dist -Force

# The bare version (tag without the leading "v") is part of the zip name.
$version = $TAG.Substring(1)
$exe_folder = "nri-${INTEGRATION}_windows_${ARCH}"
$zip_name = "nri-${INTEGRATION}-${ARCH}.${version}.zip"

write-host "===> Expanding"
Expand-Archive -Path "dist\${zip_name}" -DestinationPath "dist\${exe_folder}\"
--------------------------------------------------------------------------------
/build/windows/fix_archives.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e
#
# Gets dist/*_dirty.zip archives created by Goreleaser and reorganizes the
# files inside into the directory layout expected by the Windows agent.
#
# $1:  absolute path to the project root.
# Env: INTEGRATION - integration name (e.g. "postgresql").
#
PROJECT_PATH=$1

# IFS= and -r keep filenames intact; expansions are quoted because the
# content paths contain a space ("New Relic").
find dist -regex ".*_dirty\.zip" | while IFS= read -r zip_dirty; do
  zip_file_name=${zip_dirty/_dirty.zip}  # Strips suffix
  zip_file_name=${zip_file_name/dist\/}  # Strip folder name
  ZIP_CLEAN="${zip_file_name}.zip"
  ZIP_TMP="dist/zip_temp"
  ZIP_CONTENT_PATH="${ZIP_TMP}/${zip_file_name}_content"

  mkdir -p "${ZIP_CONTENT_PATH}"

  AGENT_DIR_IN_ZIP_PATH="${ZIP_CONTENT_PATH}/New Relic/newrelic-infra/newrelic-integrations/"
  CONF_IN_ZIP_PATH="${ZIP_CONTENT_PATH}/New Relic/newrelic-infra/integrations.d/"

  mkdir -p "${AGENT_DIR_IN_ZIP_PATH}/bin"
  mkdir -p "${CONF_IN_ZIP_PATH}"

  echo "===> Decompress ${zip_file_name} in ${ZIP_CONTENT_PATH}"
  unzip "${zip_dirty}" -d "${ZIP_CONTENT_PATH}"

  echo "===> Move files inside ${zip_file_name}"
  mv "${ZIP_CONTENT_PATH}/nri-${INTEGRATION}.exe" "${AGENT_DIR_IN_ZIP_PATH}/bin"
  mv "${ZIP_CONTENT_PATH}/${INTEGRATION}-win-definition.yml" "${AGENT_DIR_IN_ZIP_PATH}"
  mv "${ZIP_CONTENT_PATH}/${INTEGRATION}-config.yml.sample" "${CONF_IN_ZIP_PATH}"

  echo "===> Creating zip ${ZIP_CLEAN}"
  cd "${ZIP_CONTENT_PATH}"
  zip -r "../${ZIP_CLEAN}" .
  cd "${PROJECT_PATH}"
  echo "===> Moving zip ${ZIP_CLEAN}"
  mv "${ZIP_TMP}/${ZIP_CLEAN}" dist/
  echo "===> Cleaning dirty zip ${zip_dirty}"
  rm "${zip_dirty}"
done
--------------------------------------------------------------------------------
/build/windows/package_msi.ps1:
--------------------------------------------------------------------------------
<#
.SYNOPSIS
   This script creates the win .MSI
.DESCRIPTION
   Validates the release tag, optionally imports the signing certificate,
   locates MSBuild, builds the WiX installer project for the requested
   architecture, and copies the MSI to a versioned filename.
#>
param (
    # Target architecture: amd64 (default) or 386
    [string]$integration="none",
    [ValidateSet("amd64", "386")]
    [string]$arch="amd64",
    # Release tag in "vMAJOR.MINOR.PATCH" form; the leading "v" is stripped.
    [string]$tag="v0.0.0",
    # Passphrase protecting the wincert.pfx signing certificate.
    [string]$pfx_passphrase="none",
    # Certificate description, forwarded to MSBuild for signing.
    [string]$pfx_certificate_description="none"
)

# Year embedded in the installer metadata.
$buildYear = (Get-Date).Year

# Bare version: tag without the leading "v".
$version=$tag.substring(1)

# verifying version number format
$v = $version.Split(".")

# Must be exactly three dot-separated components.
if ($v.Length -ne 3) {
    echo "-version must follow a numeric major.minor.patch semantic versioning schema (received: $version)"
    exit -1
}

# Map each component that is not a valid non-negative integer to 1; any
# surviving element means the version is malformed.
$wrong = $v | ? { (-Not [System.Int32]::TryParse($_, [ref]0)) -or ( $_.Length -eq 0) -or ([int]$_ -lt 0)} | % { 1 }
if ($wrong.Length -ne 0) {
    echo "-version major, minor and patch must be valid positive integers (received: $version)"
    exit -1
}

# Import the signing certificate unless disabled via the NO_SIGN env var.
$noSign = $env:NO_SIGN ?? "false"
if ($noSign -ieq "true") {
    echo "===> Import .pfx certificate is disabled by environment variable"
} else {
    echo "===> Import .pfx certificate from GH Secrets"
    Import-PfxCertificate -FilePath wincert.pfx -Password (ConvertTo-SecureString -String $pfx_passphrase -AsPlainText -Force) -CertStoreLocation Cert:\CurrentUser\My

    echo "===> Show certificate installed"
    Get-ChildItem -Path cert:\CurrentUser\My\
}

# Locate MSBuild via the .NET Framework 4.0 registry entry.
echo "===> Checking MSBuild.exe..."
$msBuild = (Get-ItemProperty hklm:\software\Microsoft\MSBuild\ToolsVersions\4.0).MSBuildToolsPath
if ($msBuild.Length -eq 0) {
    echo "Can't find MSBuild tool. .NET Framework 4.0.x must be installed"
    exit -1
}
echo $msBuild

# Build the WiX installer project for the selected architecture.
echo "===> Building Installer"
Push-Location -Path "build\package\windows\nri-$arch-installer"

. $msBuild/MSBuild.exe nri-installer.wixproj /p:IntegrationVersion=${version} /p:IntegrationName=$integration /p:Year=$buildYear /p:NoSign=$noSign /p:pfx_certificate_description=$pfx_certificate_description
if (-not $?)
{
    echo "Failed building installer"
    Pop-Location
    exit -1
}

# Copy the MSI with the version embedded in the filename — the name that
# upload_msi.sh expects when uploading release assets.
echo "===> Making versioned installed copy"
cd bin\Release
cp "nri-$integration-$arch.msi" "nri-$integration-$arch.$version.msi"

Pop-Location
68 |
--------------------------------------------------------------------------------
/build/windows/set_exe_properties.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e
#
# Create the metadata for the exe's files, called by .goreleaser as a hook
# in the build section.
#
# $1: release tag in "vMAJOR.MINOR.PATCH" form (e.g. v1.2.3).
# $2: integration name (e.g. "postgresql").
#
TAG=$1
INTEGRATION=$2

if [ -n "$1" ]; then
  echo "===> Tag is ${TAG}"
else
  # Fix: the fallback must keep the leading "v" so the ${TAG:1} slices below
  # still yield "0.0.0" (the old '0.0.0' default produced ".0.0" and an
  # empty MajorVersion).
  echo "===> Tag not specified will be 0.0.0"
  TAG='v0.0.0'
fi

# ${TAG:1} drops the leading "v"; split the remainder on dots.
MajorVersion=$(echo "${TAG:1}" | cut -d "." -f 1)
MinorVersion=$(echo "${TAG:1}" | cut -d "." -f 2)
PatchVersion=$(echo "${TAG:1}" | cut -d "." -f 3)
BuildVersion='0'

Year=$(date +"%Y")
INTEGRATION_EXE="nri-${INTEGRATION}.exe"

# Render the versioninfo template with the computed values; the output is
# consumed by goversioninfo (via "go generate") to embed a .syso resource.
sed \
  -e "s/{MajorVersion}/$MajorVersion/g" \
  -e "s/{MinorVersion}/$MinorVersion/g" \
  -e "s/{PatchVersion}/$PatchVersion/g" \
  -e "s/{BuildVersion}/$BuildVersion/g" \
  -e "s/{Year}/$Year/g" \
  -e "s/{Integration}/nri-$INTEGRATION/g" \
  -e "s/{IntegrationExe}/$INTEGRATION_EXE/g" \
  ./build/windows/versioninfo.json.template > ./src/versioninfo.json

go generate github.com/newrelic/nri-${INTEGRATION}/src/
--------------------------------------------------------------------------------
/build/windows/unit_tests.ps1:
--------------------------------------------------------------------------------
# Runs the Go unit tests for the integration sources and fails the build on error.
echo "--- Running tests"

go test ./src/...

# $? reflects the success of the previous native command invocation.
if (!$?) {
    echo "Failed running tests"
    exit -1
}
--------------------------------------------------------------------------------
/build/windows/upload_msi.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e
#
# Upload msi artifacts to GH Release assets
#
# $1:  integration name (e.g. "postgresql").
# $2:  architecture ("amd64" or "386").
# $3:  release tag in "vMAJOR.MINOR.PATCH" form.
# Env: REPO_FULL_NAME - "owner/repo" of the target repository.
#
INTEGRATION=$1
ARCH=$2
TAG=$3

# ${TAG:1} strips the leading "v" to match the versioned MSI filename
# produced by package_msi.ps1.
MSI_PATH="build/package/windows/nri-${ARCH}-installer/bin/Release/nri-${INTEGRATION}-${ARCH}.${TAG:1}.msi"

gh release upload "$TAG" "$MSI_PATH" --repo "$REPO_FULL_NAME"
--------------------------------------------------------------------------------
/build/windows/versioninfo.json.template:
--------------------------------------------------------------------------------
1 | {
2 | "FixedFileInfo":
3 | {
4 | "FileVersion": {
5 | "Major": {MajorVersion},
6 | "Minor": {MinorVersion},
7 | "Patch": {PatchVersion},
8 | "Build": {BuildVersion}
9 | },
10 | "ProductVersion": {
11 | "Major": {MajorVersion},
12 | "Minor": {MinorVersion},
13 | "Patch": {PatchVersion},
14 | "Build": {BuildVersion}
15 | },
16 | "FileFlagsMask": "3f",
17 | "FileFlags ": "00",
18 | "FileOS": "040004",
19 | "FileType": "01",
20 | "FileSubType": "00"
21 | },
22 | "StringFileInfo":
23 | {
24 | "Comments": "(c) {Year} New Relic, Inc.",
25 | "CompanyName": "New Relic, Inc.",
26 | "FileDescription": "",
27 | "FileVersion": "{MajorVersion}.{MinorVersion}.{PatchVersion}.{BuildVersion}",
28 | "InternalName": "{Integration}",
29 | "LegalCopyright": "(c) {Year} New Relic, Inc.",
30 | "LegalTrademarks": "",
31 | "OriginalFilename": "{IntegrationExe}",
32 | "PrivateBuild": "",
33 | "ProductName": "New Relic Infrastructure Integration, {Integration}",
34 | "ProductVersion": "{MajorVersion}.{MinorVersion}.{PatchVersion}.{BuildVersion}",
35 | "SpecialBuild": ""
36 | },
37 | "VarFileInfo":
38 | {
39 | "Translation": {
40 | "LangID": "0409",
41 | "CharsetID": "04B0"
42 | }
43 | }
44 | }
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/newrelic/nri-postgresql
2 |
3 | go 1.23.5
4 |
5 | require (
6 | github.com/blang/semver/v4 v4.0.0
7 | github.com/jmoiron/sqlx v1.4.0
8 | github.com/lib/pq v1.10.9
9 | github.com/newrelic/infra-integrations-sdk/v3 v3.9.1
10 | github.com/stretchr/testify v1.10.0
11 | github.com/xeipuuv/gojsonschema v1.2.0
12 | gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0
13 | gopkg.in/yaml.v3 v3.0.1
14 | github.com/go-viper/mapstructure/v2 v2.2.1
15 | )
16 |
17 | require (
18 | github.com/davecgh/go-spew v1.1.1 // indirect
19 | github.com/kr/text v0.2.0 // indirect
20 | github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
21 | github.com/pmezard/go-difflib v1.0.0 // indirect
22 | github.com/stretchr/objx v0.5.2 // indirect
23 | github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
24 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
25 | gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
26 | )
27 |
--------------------------------------------------------------------------------
/go.sum:
--------------------------------------------------------------------------------
1 | filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
2 | filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
3 | github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
4 | github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
5 | github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
6 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
7 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
8 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
9 | github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
10 | github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
11 | github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
12 | github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
13 | github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
14 | github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
15 | github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
16 | github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
17 | github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
18 | github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
19 | github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
20 | github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
21 | github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
22 | github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
23 | github.com/newrelic/infra-integrations-sdk/v3 v3.9.1 h1:dCtVLsYNHWTQ5aAlAaHroomOUlqxlGTrdi6XTlvBDfI=
24 | github.com/newrelic/infra-integrations-sdk/v3 v3.9.1/go.mod h1:yPeidhcq9Cla0QDquGXH0KqvS2k9xtetFOD7aLA0Z8M=
25 | github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
26 | github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
27 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
28 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
29 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
30 | github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
31 | github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
32 | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
33 | github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
34 | github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
35 | github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
36 | github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
37 | github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
38 | github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
39 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
40 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
41 | github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
42 | github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
43 | gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 h1:FVCohIoYO7IJoDDVpV2pdq7SgrMH6wHnuTyrdrxJNoY=
44 | gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw=
45 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
46 | gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
47 | gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
48 | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
49 | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
50 |
--------------------------------------------------------------------------------
/legacy/postgresql-definition.yml:
--------------------------------------------------------------------------------
1 | name: com.newrelic.postgresql
2 | description: Reports status and metrics for postgresql service
3 | protocol_version: 2
4 | os: linux
5 |
6 | commands:
7 | all_data:
8 | command:
9 | - ./bin/nri-postgresql
10 | prefix: config/postgresql
11 | interval: 15
12 | metrics:
13 | command:
14 | - ./bin/nri-postgresql
15 | - --metrics
16 | prefix: config/postgresql
17 | interval: 15
18 | inventory:
19 | command:
20 | - ./bin/nri-postgresql
21 | - --inventory
22 | prefix: config/postgresql
23 | interval: 15
24 |
--------------------------------------------------------------------------------
/legacy/postgresql-win-definition.yml:
--------------------------------------------------------------------------------
1 | name: com.newrelic.postgresql
2 | description: Reports status and metrics for postgresql service
3 | protocol_version: 2
4 | os: windows
5 |
6 | commands:
7 | all_data:
8 | command:
9 | - .\bin\nri-postgresql.exe
10 | prefix: config/postgresql
11 | interval: 15
12 | metrics:
13 | command:
14 | - .\bin\nri-postgresql.exe
15 | - --metrics
16 | prefix: config/postgresql
17 | interval: 15
18 | inventory:
19 | command:
20 | - .\bin\nri-postgresql.exe
21 | - --inventory
22 | prefix: config/postgresql
23 | interval: 15
24 |
--------------------------------------------------------------------------------
/postgresql-config.yml.k8s_sample:
--------------------------------------------------------------------------------
1 | postgresql-config.yml: |
2 | ---
3 | # Run auto discovery to find pods with label "app=postgresql"
4 | # https://docs.newrelic.com/docs/integrations/host-integrations/installation/container-auto-discovery
5 | discovery:
6 | command:
7 | # Use the following optional arguments:
8 | # --namespaces: Comma separated list of namespaces to discover pods on
9 | # --tls: Use secure (TLS) connection
10 | # --port: Port used to connect to the kubelet. Default is 10255
11 | exec: /var/db/newrelic-infra/nri-discovery-kubernetes
12 | match:
13 | label.app: postgresql
14 | integrations:
15 | - name: nri-postgresql
16 | env:
17 | USERNAME:
18 | PASSWORD:
19 | # Using the discovered IP as the host address
20 | HOSTNAME: ${discovery.ip}
21 | # The database to connect to on the postgres instance. Defaults to postgres.
22 | # DATABASE: postgres
23 | # The port of the postgres instance. If PgBouncer is being used,
24 | # use the port it is running on. Defaults to 5432
25 | PORT: 5432
26 | # Collection List can be either a JSON array or a JSON object.
27 | #
28 | # If it is a JSON array, it will be interpreted as a list of database names to
29 | # collect all related metrics from. This will collect metrics for each database
30 | # specified, as well as all tables and indexes that belong to that database.
31 | # Example:
32 | # COLLECTION_LIST: '["postgres"]'
33 | #
34 | # If it is a JSON object, you can more finely tune the entities that are collected.
35 | # Only the entities that are specified in the object will be collected. No automatic
36 | # discovery will be performed.
37 | # The levels of JSON are database name -> schema name -> table name -> index name
38 | # Example:
39 | # collection_list: '{"postgres":{"public":{"pg_table1":["pg_index1","pg_index2"],"pg_table2":[]}}}'
40 | COLLECTION_LIST: '["postgres"]'
41 |
42 | # JSON array of database names that will be ignored for metrics collection.
43 | # Typically useful for cases where COLLECTION_LIST is set to 'ALL' and some databases need to be ignored.
44 | # Defaults to empty '[]'.
45 | # Example:
46 | # COLLECTION_IGNORE_DATABASE_LIST: '["azure_maintenance","azure_sys"]'
47 | #
48 | # COLLECTION_IGNORE_DATABASE_LIST: '[]'
49 |
50 | # JSON array of table names that will be ignored for metrics collection.
51 | # Defaults to empty '[]'.
52 | # Example:
53 | # COLLECTION_IGNORE_TABLE_LIST: '["table1","table2"]'
54 |
55 | # True if database lock metrics should be collected
56 | # Note: requires that the `tablefunc` extension be installed on the public schema
57 | # of the database where lock metrics will be collected.
58 | COLLECT_DB_LOCK_METRICS: false
59 | ENABLE_SSL: true
60 | # True if the SSL certificate should be trusted without validating.
61 | # Setting this to true may open up the monitoring service to MITM attacks.
62 | # Defaults to false.
63 | TRUST_SERVER_CERTIFICATE: false
64 | SSL_ROOT_CERT_LOCATION: /etc/newrelic-infra/root_cert.crt
65 | SSL_CERT_LOCATION: /etc/newrelic-infra/postgresql.crt
66 | SSL_KEY_LOCATION: /etc/newrelic-infra/postgresql.key
67 | TIMEOUT: 10
68 |
69 | CUSTOM_METRICS_QUERY: >-
70 | select
71 | 'rows_inserted' as "metric_name",
72 | 'delta' as "metric_type",
73 | sd.tup_inserted as "metric_value",
74 | sd.datid as "database_id"
75 | from pg_stat_database sd;
76 | labels:
77 | env: production
78 | role: postgresql
79 |
--------------------------------------------------------------------------------
/postgresql-config.yml.sample:
--------------------------------------------------------------------------------
1 | integrations:
2 | - name: nri-postgresql
3 | env:
4 | # The username for the postgres instance. Required.
5 | USERNAME: postgres
6 | # The password for the postgres instance. Required.
7 | PASSWORD: 'pass'
8 |
9 | # The hostname for the postgres instance. Defaults to localhost.
10 | HOSTNAME: psql-sample.localnet
11 |
12 | # The database to connect to on the postgres instance. Defaults to postgres.
13 | # DATABASE: postgres
14 |
15 | # The port of the postgres instance. If PgBouncer is being used,
16 | # use the port it is running on. Defaults to 5432
17 | PORT: "6432"
18 |
19 | # Collection List can be either a JSON array, a JSON object, or the string literal 'ALL'.
20 | #
21 | # If it is a JSON array, it will be interpreted as a list of database names to
22 | # collect all related metrics from. This will collect metrics for each database
23 | # specified, as well as all tables and indexes that belong to that database.
24 | # Example:
25 | # COLLECTION_LIST: '["postgres"]'
26 | # If it is the string literal 'ALL', it will collect metrics for all databases, schemas, tables, and indexes
27 | # Example:
28 | # COLLECTION_LIST: 'ALL'
29 | COLLECTION_LIST: '["postgres"]'
30 |
31 | # JSON array of database names that will be ignored for metrics collection.
32 | # Typically useful for cases where COLLECTION_LIST is set to 'ALL' and some databases need to be ignored.
33 | # Defaults to empty '[]'.
34 | # Example:
35 | # COLLECTION_IGNORE_DATABASE_LIST: '["azure_maintenance","azure_sys"]'
36 |
37 | # JSON array of table names that will be ignored for metrics collection.
38 | # Defaults to empty '[]'.
39 | # Example:
40 | # COLLECTION_IGNORE_TABLE_LIST: '["table1","table2"]'
41 |
42 | # True if database lock metrics should be collected
43 | # Note: requires that the `tablefunc` extension be installed on the public schema
44 | # of the database where lock metrics will be collected.
45 | COLLECT_DB_LOCK_METRICS: "false"
46 |
47 | # Enable collecting bloat metrics which can be performance intensive
48 | COLLECT_BLOAT_METRICS: "true"
49 |
50 | # True if SSL is to be used. Defaults to false.
51 | ENABLE_SSL: "false"
52 |
53 | # Enable query performance monitoring - Defaults to false
54 | # ENABLE_QUERY_MONITORING : "false"
55 |
56 | # Threshold in milliseconds for query response time to fetch individual query performance metrics - Defaults to 500
57 | # QUERY_MONITORING_RESPONSE_TIME_THRESHOLD : "500"
58 |
59 | # The number of records for each query performance metrics - Defaults to 20
60 | # QUERY_MONITORING_COUNT_THRESHOLD : "20"
61 |
62 | # True if the SSL certificate should be trusted without validating.
63 | # Setting this to true may open up the monitoring service to MITM attacks.
64 | # Defaults to false.
# This setting will need to be set to true for managed database environments like Azure Flexible Server or AWS RDS/Aurora
66 | TRUST_SERVER_CERTIFICATE: "false"
67 |
68 | # SSL_CERT_LOCATION: /etc/newrelic-infra/postgresql.crt
69 | # SSL_KEY_LOCATION: /etc/newrelic-infra/postgresql.key
70 | # SSL_ROOT_CERT_LOCATION: /etc/newrelic-infra/root_cert.crt
71 | TIMEOUT: "10"
72 |
73 | # A SQL query to collect custom metrics. Must have the columns metric_name, metric_type, and metric_value. Additional columns are added as attributes
74 | # CUSTOM_METRICS_QUERY: >-
75 | # select
76 | # 'rows_inserted' as "metric_name",
77 | # 'delta' as "metric_type",
78 | # sd.tup_inserted as "metric_value",
79 | # sd.datid as "database_id"
80 | # from pg_stat_database sd;
81 |
# Can be used instead of CUSTOM_METRICS_QUERY to specify the path to a
83 | # YAML configuration with one or more custom SQL queries to collect
84 | # For more information check https://docs.newrelic.com/docs/integrations/host-integrations/host-integrations-list/postgresql-monitoring-integration/#example-postgresSQL-config
85 | # CUSTOM_METRICS_CONFIG: /path/to/postgresql-custom-query.yml
86 |
87 | interval: 15s
88 | labels:
89 | env: production
90 | role: postgresql
91 | inventory_source: config/postgresql
92 |
--------------------------------------------------------------------------------
/postgresql-custom-query.yml.sample:
--------------------------------------------------------------------------------
1 | ---
2 | queries:
3 |
4 | # Metric names are set to the column names in the query results
5 | - query: >-
6 | SELECT
7 | BG.checkpoints_timed AS scheduled_checkpoints_performed,
8 | BG.checkpoints_req AS requested_checkpoints_performed,
9 | BG.buffers_checkpoint AS buffers_written_during_checkpoint,
10 | BG.buffers_clean AS buffers_written_by_background_writer,
11 | BG.maxwritten_clean AS background_writer_stops,
12 | BG.buffers_backend AS buffers_written_by_backend,
13 | BG.buffers_alloc AS buffers_allocated
14 | FROM pg_stat_bgwriter BG;
15 |
16 | # database defaults to the auth database in the main config
17 | database: postgres
18 |
19 | # If not set explicitly here, metric type will default to
20 | # 'gauge' for numbers and 'attribute' for strings
21 | metric_types:
22 | buffers_allocated: rate
23 |
24 | # If unset, sample_name defaults to PostgresCustomSample
25 | sample_name: MyCustomSample
26 |
# Query to collect unused indexes. This query needs to be repeated for every user database to collect data from all of them.
28 | - query: >-
29 | SELECT schemaname, CAST(relname as varchar(100)), CAST(indexrelname as varchar(100)), idx_scan, idx_tup_fetch, idx_tup_read,
30 | pg_size_pretty(pg_relation_size(indexrelid)) as idx_size,
31 | pg_size_pretty(sum(pg_relation_size(indexrelid))
32 | OVER (ORDER BY idx_scan, indexrelid)) as total
33 | FROM pg_stat_user_indexes
34 | WHERE idx_scan=0
35 | AND idx_tup_fetch=0
36 | AND idx_tup_read=0
37 | LIMIT 25;
38 |
39 | # database defaults to the auth database in the main config
40 | # database: postgres
41 |
42 | # If unset, sample_name defaults to PostgresCustomSample
43 | sample_name: PostgresUnusedIndexesSample
44 |
# Query to collect missing indexes. This query needs to be repeated for every user database to collect data from all of them.
46 | - query: >-
47 | SELECT schemaname, CAST(relname as varchar(100)), seq_scan, seq_tup_read, seq_tup_read/seq_scan as avg, idx_scan
48 | FROM pg_stat_user_tables
49 | WHERE seq_scan > 0
50 | LIMIT 25;
51 |
52 | # database defaults to the auth database in the main config
53 | # database: postgres
54 |
55 | # If unset, sample_name defaults to PostgresCustomSample
56 | sample_name: PostgresMissingIndexesSample
57 |
# Query to collect most expensive queries. This query needs to be repeated for every user database to collect data from all of them.
59 | # Note this extension may not be enabled on your server.
60 | # In the main postgres-config.yml file it is best to let it default to the postgres database when connecting.
61 | ## For AWS RDS environments pg_stat_statements will be available by default depending on your version.
62 | ## AWS link to check: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_PostgreSQL.html#PostgreSQL.Concepts.General.FeatureSupport.Extensions
## For AWS Aurora instances running Postgres 13+ it may be necessary to manually create the pg_stat_statements extension with the following command:
64 | ### CREATE EXTENSION pg_stat_statements;
65 | # For standalone instances it must be added to your postgresql.conf file.
66 | ## Link here: https://www.postgresql.org/docs/current/pgstatstatements.html
# Uncomment this query when the pg_stat_statements extension has been added to the shared_preload_libraries
68 | #- query: >-
69 | # SELECT CAST(d.datname as varchar(100)) as databasename,
70 | # CAST(u.usename as varchar(100)) as username,
71 | # round(( 100 * s.total_time / sum(s.total_time) over ())::smallint) as percent,
72 | # CAST(s.total_time as int), s.calls as total_calls, s.rows as total_rows,
73 | # round(s.mean_time::int) as mean_time, substring(s.query, 1, 4000) as query
74 | # FROM pg_stat_statements s
75 | # JOIN pg_database d ON (s.dbid = d.oid)
76 | # JOIN pg_user u ON (s.userid = u.usesysid)
77 | # ORDER BY s.total_time DESC
78 | # LIMIT 50;
79 |
80 | # database defaults to the auth database in the main config
81 | # database: postgres
82 |
83 | # Use this version of the query if running Postgres 13+
84 | #- query: >-
85 | # SELECT CAST(d.datname as varchar(100)) as databasename,
86 | # CAST(u.usename as varchar(100)) as username,
87 | # round(( 100 * s.total_exec_time / sum(s.total_exec_time) over ())::smallint) as percent,
88 | # CAST(s.total_exec_time as int), s.calls as total_calls, s.rows as total_rows,
89 | # round(s.mean_exec_time::int) as mean_time, substring(s.query, 1, 4000) as query
90 | # FROM pg_stat_statements s
91 | # JOIN pg_database d ON (s.dbid = d.oid)
92 | # JOIN pg_user u ON (s.userid = u.usesysid)
93 | # ORDER BY s.total_exec_time DESC
94 | # LIMIT 50;
95 |
96 | # database defaults to the auth database in the main config
97 | # database: postgres
98 |
99 | # If unset, sample_name defaults to PostgresCustomSample
100 | # sample_name: PostgresExpensiveQueriesSample
101 |
--------------------------------------------------------------------------------
/postgresql-log.yml.example:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Log forwarder configuration for Postgresql logs #
3 | # NOTE: Postgres installations can vary. Use the appropriate file: #
4 | # log location that matches your environment/installation and version #
5 | # Source: file #
6 | # Available customization parameters: attributes, max_line_kb, pattern #
7 | ###############################################################################
8 | logs:
9 | - name: postgresql
10 | file: /var/lib/pgsql/12/data/log/postgresql*.log
11 | # file: /var/log/postgresql/*.log
12 | attributes:
13 | logtype: postgresql
14 |
--------------------------------------------------------------------------------
/queries/database_queries.pgsql:
--------------------------------------------------------------------------------
1 | -- min version 9.1.0
2 | SELECT
3 | D.datname,
4 | SD.numbackends AS active_connections,
5 | SD.xact_commit AS transactions_committed,
6 | SD.xact_rollback AS transactions_rolled_back,
7 | SD.blks_read AS block_reads,
8 | SD.blks_hit AS buffer_hits,
9 | SD.tup_returned AS rows_returned,
10 | SD.tup_fetched AS rows_fetched,
11 | SD.tup_inserted AS rows_inserted,
12 | SD.tup_updated AS rows_updated,
13 | SD.tup_deleted AS rows_deleted,
14 | DBC.confl_tablespace AS queries_canceled_due_to_dropped_tablespaces,
15 | DBC.confl_lock AS queries_canceled_due_to_lock_timeouts,
16 | DBC.confl_snapshot AS queries_canceled_due_to_old_snapshots,
17 | DBC.confl_snapshot AS queries_canceled_due_to_old_snapshots,
18 | DBC.confl_bufferpin AS queries_canceled_due_to_pinned_buffers,
19 | DBC.confl_deadlock AS queries_canceled_due_to_deadlocks
20 | FROM pg_stat_database SD
21 | INNER JOIN pg_database D ON D.datname = SD.datname
22 | INNER JOIN pg_stat_database_conflicts DBC ON DBC.datname = D.datname
23 | LEFT JOIN pg_tablespace TS ON TS.oid = D.dattablespace
24 | WHERE D.datistemplate = FALSE AND D.datname IS NOT NULL;
25 | -- need to append a 'AND D.datname IN (comma separated DB list)'
26 |
-- min version 9.2.0
-- Supplemental per-database statistics whose columns (temp_files, temp_bytes,
-- deadlocks, blk_read_time, blk_write_time) require pg_stat_database from 9.2+.
SELECT
  SD.temp_files AS temporary_files_created,
  SD.temp_bytes AS temporary_bytes_written,
  SD.deadlocks AS deadlocks,
  cast(SD.blk_read_time AS bigint) AS time_spent_reading_data,
  cast(SD.blk_write_time AS bigint) AS time_spent_writing_data
FROM pg_stat_database SD
INNER JOIN pg_database D ON D.datname = SD.datname
INNER JOIN pg_stat_database_conflicts DBC ON DBC.datname = D.datname
LEFT JOIN pg_tablespace TS ON TS.oid = D.dattablespace
WHERE D.datistemplate = FALSE AND D.datname IS NOT NULL;
-- need to append a 'AND D.datname IN (comma separated DB list)'
40 |
41 |
-- max version 9.0.999
-- Fallback per-database query for servers older than 9.1; omits the
-- pg_stat_database_conflicts join (that view was added in 9.1).
SELECT
  SD.numbackends AS active_connections,
  SD.xact_commit AS transactions_committed,
  SD.xact_rollback AS transactions_rolled_back,
  SD.blks_read AS block_reads,
  SD.blks_hit AS buffer_hits,
  SD.tup_returned AS rows_returned,
  SD.tup_fetched AS rows_fetched,
  SD.tup_inserted AS rows_inserted,
  SD.tup_updated AS rows_updated,
  SD.tup_deleted AS rows_deleted
FROM pg_stat_database SD
INNER JOIN pg_database D ON D.datname = SD.datname
LEFT JOIN pg_tablespace TS ON TS.oid = D.dattablespace
WHERE D.datistemplate = FALSE AND D.datname IS NOT NULL;
-- need to append a 'AND D.datname IN (comma separated DB list)'
59 |
--------------------------------------------------------------------------------
/queries/instance_queries.pgsql:
--------------------------------------------------------------------------------
-- Background writer / checkpointer statistics, available on all supported versions.
SELECT
  BG.checkpoints_timed AS scheduled_checkpoints_performed,
  BG.checkpoints_req AS requested_checkpoints_performed,
  BG.buffers_checkpoint AS buffers_written_during_checkpoint,
  BG.buffers_clean AS buffers_written_by_background_writer,
  BG.maxwritten_clean AS times_background_writer_stopped_due_to_too_many_buffers,
  BG.buffers_backend AS buffers_written_by_backend,
  BG.buffers_alloc AS buffers_allocated
FROM pg_stat_bgwriter BG;
10 |
-- Requires 9.1
-- buffers_backend_fsync was added to pg_stat_bgwriter in PostgreSQL 9.1.
Select
  BG.buffers_backend_fsync AS times_backend_executed_own_fsync
FROM pg_stat_bgwriter BG;
15 |
-- Requires 9.2
-- checkpoint_write_time / checkpoint_sync_time were added in PostgreSQL 9.2;
-- values are cast to bigint for consumption as integer metrics.
Select
  cast(BG.checkpoint_write_time AS bigint) AS time_writing_checkpoint_files_to_disk,
  cast(BG.checkpoint_sync_time AS bigint) AS time_synchronizing_checkpoint_files_to_disk
FROM pg_stat_bgwriter BG;
21 |
--------------------------------------------------------------------------------
/queries/inventory.pgsql:
--------------------------------------------------------------------------------
-- configuration query: current, boot-time, and reset values for every server setting
SELECT name, setting, boot_val, reset_val FROM pg_settings;

-- version query
SHOW server_version;
5 |
--------------------------------------------------------------------------------
/src/args/argument_list.go:
--------------------------------------------------------------------------------
1 | // Package args contains the argument list, defined as a struct, along with a method that validates passed-in args
2 | package args
3 |
4 | import (
5 | "errors"
6 | sdkArgs "github.com/newrelic/infra-integrations-sdk/v3/args"
7 | "github.com/newrelic/infra-integrations-sdk/v3/log"
8 | )
9 |
// ArgumentList struct that holds all PostgreSQL arguments.
// Each field's `default` and `help` tags define the CLI/environment-variable
// behavior via the infra-integrations SDK argument parser.
type ArgumentList struct {
	sdkArgs.DefaultArgumentList

	// Connection settings.
	Username string `default:"" help:"The username for the PostgreSQL database"`
	Password string `default:"" help:"The password for the specified username"`
	Hostname string `default:"localhost" help:"The PostgreSQL hostname to connect to"`
	Database string `default:"postgres" help:"The PostgreSQL database name to connect to"`
	Port     string `default:"5432" help:"The port to connect to the PostgreSQL database"`

	// Entity collection filters (JSON-encoded strings).
	CollectionList               string `default:"{}" help:"A JSON object which defines the databases, schemas, tables, and indexes to collect. Can also be a JSON array that list databases to be collected. Can also be the string literal 'ALL' to collect everything. Collects nothing by default."`
	CollectionIgnoreDatabaseList string `default:"[]" help:"A JSON array that list databases that will be excluded from collection. Nothing is excluded by default."`
	CollectionIgnoreTableList    string `default:"[]" help:"A JSON array that list tables that will be excluded from collection. Nothing is excluded by default."`

	// TLS configuration; see Validate/validateSSL for the required combinations.
	SSLRootCertLocation string `default:"" help:"Absolute path to PEM encoded root certificate file"`
	SSLCertLocation     string `default:"" help:"Absolute path to PEM encoded client cert file"`
	SSLKeyLocation      string `default:"" help:"Absolute path to PEM encoded client key file"`

	Timeout string `default:"10" help:"Maximum wait for connection, in seconds. Set 0 for no timeout"`

	// Custom metric collection (inline query or YAML file of queries).
	CustomMetricsQuery  string `default:"" help:"A SQL query to collect custom metrics. Must have the columns metric_name, metric_type, and metric_value. Additional columns are added as attributes"`
	CustomMetricsConfig string `default:"" help:"YAML configuration with one or more custom SQL queries to collect"`

	EnableSSL              bool `default:"false" help:"If true will use SSL encryption, false will not use encryption"`
	TrustServerCertificate bool `default:"false" help:"If true server certificate is not verified for SSL. If false certificate will be verified against supplied certificate"`
	Pgbouncer              bool `default:"false" help:"Collects metrics from PgBouncer instance. Assumes connection is through PgBouncer."`
	CollectDbLockMetrics   bool `default:"false" help:"If true, enables collection of lock metrics for the specified database. (Note: requires that the 'tablefunc' extension is installed)"` //nolint: stylecheck
	CollectBloatMetrics    bool `default:"true" help:"Enable collecting bloat metrics which can be performance intensive"`
	ShowVersion            bool `default:"false" help:"Print build information and exit"`

	// Query performance monitoring options.
	EnableQueryMonitoring                bool `default:"false" help:"Enable collection of detailed query performance metrics."`
	QueryMonitoringResponseTimeThreshold int  `default:"500" help:"Threshold in milliseconds for query response time. If response time for the individual query exceeds this threshold, the individual query is reported in metrics"`
	QueryMonitoringCountThreshold        int  `default:"20" help:"The number of records for each query performance metrics"`
}
37 |
38 | // Validate validates PostgreSQl arguments
39 | func (al ArgumentList) Validate() error {
40 | if al.Username == "" || al.Password == "" {
41 | return errors.New("invalid configuration: must specify a username and password")
42 | }
43 | if err := al.validateSSL(); err != nil {
44 | return err
45 | }
46 | return nil
47 | }
48 |
49 | func (al ArgumentList) validateSSL() error {
50 | if al.EnableSSL {
51 | if !al.TrustServerCertificate && al.SSLRootCertLocation == "" {
52 | return errors.New("invalid configuration: must specify a certificate file when using SSL and not trusting server certificate")
53 | }
54 |
55 | if al.SSLCertLocation == "" || al.SSLKeyLocation == "" {
56 | log.Warn("potentially invalid configuration: client cert and/or key file not present when SSL is enabled")
57 | }
58 | }
59 |
60 | return nil
61 | }
62 |
--------------------------------------------------------------------------------
/src/args/argument_list_test.go:
--------------------------------------------------------------------------------
1 | package args
2 |
3 | import (
4 | "testing"
5 | )
6 |
7 | func TestValidate(t *testing.T) {
8 | testCases := []struct {
9 | name string
10 | arg *ArgumentList
11 | wantError bool
12 | }{
13 | {
14 | "No Errors",
15 | &ArgumentList{
16 | Username: "user",
17 | Password: "password",
18 | Hostname: "localhost",
19 | Port: "90",
20 | CollectionList: "{}",
21 | },
22 | false,
23 | },
24 | {
25 | "No Username",
26 | &ArgumentList{
27 | Username: "",
28 | Password: "password",
29 | Hostname: "localhost",
30 | Port: "90",
31 | CollectionList: "{}",
32 | },
33 | true,
34 | },
35 | {
36 | "No Password",
37 | &ArgumentList{
38 | Username: "user",
39 | Hostname: "localhost",
40 | Port: "90",
41 | CollectionList: "{}",
42 | },
43 | true,
44 | },
45 | {
46 | "SSL and No Server Certificate",
47 | &ArgumentList{
48 | Username: "user",
49 | Password: "password",
50 | Hostname: "localhost",
51 | Port: "90",
52 | EnableSSL: true,
53 | TrustServerCertificate: false,
54 | SSLRootCertLocation: "",
55 | CollectionList: "{}",
56 | },
57 | true,
58 | },
59 | {
60 | "Missing Key file with Cert file",
61 | &ArgumentList{
62 | Username: "user",
63 | Password: "password",
64 | Hostname: "localhost",
65 | Port: "90",
66 | EnableSSL: true,
67 | TrustServerCertificate: true,
68 | SSLKeyLocation: "",
69 | SSLCertLocation: "my.crt",
70 | CollectionList: "{}",
71 | },
72 | false,
73 | },
74 | {
75 | "Missing Cert file with Key file",
76 | &ArgumentList{
77 | Username: "user",
78 | Password: "password",
79 | Hostname: "localhost",
80 | Port: "90",
81 | EnableSSL: true,
82 | TrustServerCertificate: true,
83 | SSLKeyLocation: "my.key",
84 | SSLCertLocation: "",
85 | CollectionList: "{}",
86 | },
87 | false,
88 | },
89 | }
90 |
91 | for _, tc := range testCases {
92 | err := tc.arg.Validate()
93 | if tc.wantError && err == nil {
94 | t.Errorf("Test Case %s Failed: Expected error", tc.name)
95 | } else if !tc.wantError && err != nil {
96 | t.Errorf("Test Case %s Failed: Unexpected error: %v", tc.name, err)
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/src/connection/pgsql_connection_mock.go:
--------------------------------------------------------------------------------
1 | package connection
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/jmoiron/sqlx"
7 | "github.com/stretchr/testify/mock"
8 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
9 | )
10 |
11 | // CreateMockSQL creates a Test SQLConnection. Must Close con when done
12 | func CreateMockSQL(t *testing.T) (con *PGSQLConnection, mock sqlmock.Sqlmock) {
13 | mockDB, mock, err := sqlmock.New()
14 | if err != nil {
15 | t.Errorf("Unexpected error while mocking: %s", err.Error())
16 | t.FailNow()
17 | }
18 |
19 | con = &PGSQLConnection{
20 | connection: sqlx.NewDb(mockDB, "sqlmock"),
21 | }
22 |
23 | return
24 | }
25 |
// MockInfo is a mock struct which implements connection.Info, backed by
// testify's mock.Mock for call expectations.
type MockInfo struct {
	mock.Mock
}
30 |
// HostPort returns a fixed mocked host "testhost" and port "1234".
func (mi *MockInfo) HostPort() (string, string) {
	return "testhost", "1234"
}
35 |
// DatabaseName returns a fixed mocked database name "postgres".
func (mi *MockInfo) DatabaseName() string {
	return "postgres"
}
40 |
// NewConnection returns whatever connection and error the test registered via
// mi.On("NewConnection", ...); the database argument is recorded for
// expectation matching.
func (mi *MockInfo) NewConnection(database string) (*PGSQLConnection, error) {
	args := mi.Called(database)
	return args.Get(0).(*PGSQLConnection), args.Error(1)
}
46 |
--------------------------------------------------------------------------------
/src/fips.go:
--------------------------------------------------------------------------------
1 | // Copyright 2025 New Relic Corporation. All rights reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | //go:build fips
5 | // +build fips
6 |
7 | package main
8 |
9 | import (
10 | _ "crypto/tls/fipsonly"
11 | )
12 |
--------------------------------------------------------------------------------
/src/inventory/inventory.go:
--------------------------------------------------------------------------------
1 | package inventory
2 |
3 | import (
4 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
5 | "github.com/newrelic/infra-integrations-sdk/v3/log"
6 | "github.com/newrelic/nri-postgresql/src/connection"
7 | )
8 |
const (
	// configQuery retrieves every server setting with its current, boot-time,
	// and reset values from pg_settings.
	configQuery = `SELECT name, setting, boot_val, reset_val FROM pg_settings`
)

// configQueryRow maps one row of configQuery. The value columns are
// interface{} since pg_settings values may scan as different Go types
// (see the string and integer rows used in inventory_test.go).
type configQueryRow struct {
	Name     string      `db:"name"`
	Setting  interface{} `db:"setting"`
	BootVal  interface{} `db:"boot_val"`
	ResetVal interface{} `db:"reset_val"`
}
19 |
20 | // PopulateInventory collects all the configuration and populates the instance entity
21 | func PopulateInventory(entity *integration.Entity, connection *connection.PGSQLConnection) {
22 | configRows := make([]*configQueryRow, 0)
23 | if err := connection.Query(&configRows, configQuery); err != nil {
24 | log.Error("Failed to execute config query: %v", err)
25 | }
26 |
27 | for _, row := range configRows {
28 | logInventoryFailure(entity.SetInventoryItem(row.Name+"/setting", "value", row.Setting))
29 | logInventoryFailure(entity.SetInventoryItem(row.Name+"/boot_val", "value", row.BootVal))
30 | logInventoryFailure(entity.SetInventoryItem(row.Name+"/reset_val", "value", row.ResetVal))
31 | }
32 | }
33 |
34 | func logInventoryFailure(err error) {
35 | if err != nil {
36 | log.Error("Failed set inventory item: %v", err)
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/inventory/inventory_test.go:
--------------------------------------------------------------------------------
1 | package inventory
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/newrelic/infra-integrations-sdk/v3/data/inventory"
7 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
8 | "github.com/newrelic/nri-postgresql/src/connection"
9 | "github.com/stretchr/testify/assert"
10 |
11 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
12 | )
13 |
14 | func TestPopulateInventory(t *testing.T) {
15 | testIntegration, _ := integration.New("test", "0.1.0")
16 | testEntity, _ := testIntegration.Entity("test", "instance")
17 |
18 | testConnection, mock := connection.CreateMockSQL(t)
19 |
20 | configRows := sqlmock.NewRows([]string{"name", "setting", "boot_val", "reset_val"}).
21 | AddRow("allow_system_table_mods", "off", "on", "test").
22 | AddRow("authentication_timeout", 1, 2, 3)
23 |
24 | mock.ExpectQuery(configQuery).WillReturnRows(configRows)
25 |
26 | PopulateInventory(testEntity, testConnection)
27 |
28 | expected := inventory.Items{
29 | "allow_system_table_mods/setting": {
30 | "value": "off",
31 | },
32 | "allow_system_table_mods/boot_val": {
33 | "value": "on",
34 | },
35 | "allow_system_table_mods/reset_val": {
36 | "value": "test",
37 | },
38 | "authentication_timeout/setting": {
39 | "value": 1,
40 | },
41 | "authentication_timeout/boot_val": {
42 | "value": 2,
43 | },
44 | "authentication_timeout/reset_val": {
45 | "value": 3,
46 | },
47 | }
48 |
49 | assert.Equal(t, expected, testEntity.Inventory.Items())
50 | }
51 |
--------------------------------------------------------------------------------
/src/main.go:
--------------------------------------------------------------------------------
1 | //go:generate goversioninfo
2 | package main
3 |
4 | import (
5 | "fmt"
6 | "os"
7 | "runtime"
8 | "strings"
9 |
10 | queryperformancemonitoring "github.com/newrelic/nri-postgresql/src/query-performance-monitoring"
11 |
12 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
13 | "github.com/newrelic/infra-integrations-sdk/v3/log"
14 | "github.com/newrelic/nri-postgresql/src/args"
15 | "github.com/newrelic/nri-postgresql/src/collection"
16 | "github.com/newrelic/nri-postgresql/src/connection"
17 | "github.com/newrelic/nri-postgresql/src/inventory"
18 | "github.com/newrelic/nri-postgresql/src/metrics"
19 | )
20 |
21 | const (
22 | integrationName = "com.newrelic.postgresql"
23 | )
24 |
25 | var (
26 | integrationVersion = "0.0.0"
27 | gitCommit = ""
28 | buildDate = ""
29 | )
30 |
31 | func main() {
32 |
33 | var args args.ArgumentList
34 | // Create Integration
35 | pgIntegration, err := integration.New(integrationName, integrationVersion, integration.Args(&args))
36 | if err != nil {
37 | log.Error(err.Error())
38 | os.Exit(1)
39 | }
40 |
41 | if args.ShowVersion {
42 | fmt.Printf(
43 | "New Relic %s integration Version: %s, Platform: %s, GoVersion: %s, GitCommit: %s, BuildDate: %s\n",
44 | strings.Title(strings.Replace(integrationName, "com.newrelic.", "", 1)),
45 | integrationVersion,
46 | fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH),
47 | runtime.Version(),
48 | gitCommit,
49 | buildDate)
50 | os.Exit(0)
51 | }
52 |
53 | // Setup logging with verbose
54 | log.SetupLogging(args.Verbose)
55 |
56 | // Validate arguments
57 | if err := args.Validate(); err != nil {
58 | log.Error("Configuration error for args %v: %s", args, err.Error())
59 | os.Exit(1)
60 | }
61 |
62 | connectionInfo := connection.DefaultConnectionInfo(&args)
63 | collectionList, err := collection.BuildCollectionList(args, connectionInfo)
64 | if err != nil {
65 | log.Error("Error creating list of entities to collect: %s", err)
66 | os.Exit(1)
67 | }
68 | instance, err := pgIntegration.Entity(fmt.Sprintf("%s:%s", args.Hostname, args.Port), "pg-instance")
69 | if err != nil {
70 | log.Error("Error creating instance entity: %s", err.Error())
71 | os.Exit(1)
72 | }
73 |
74 | if args.HasMetrics() {
75 | metrics.PopulateMetrics(connectionInfo, collectionList, instance, pgIntegration, args.Pgbouncer, args.CollectDbLockMetrics, args.CollectBloatMetrics, args.CustomMetricsQuery)
76 | if args.CustomMetricsConfig != "" {
77 | metrics.PopulateCustomMetricsFromFile(connectionInfo, args.CustomMetricsConfig, pgIntegration)
78 | }
79 | }
80 |
81 | if args.HasInventory() {
82 | con, err := connectionInfo.NewConnection(connectionInfo.DatabaseName())
83 | if err != nil {
84 | log.Error("Inventory collection failed: error creating connection to PostgreSQL: %s", err.Error())
85 | } else {
86 | defer con.Close()
87 | inventory.PopulateInventory(instance, con)
88 | }
89 | }
90 |
91 | if err = pgIntegration.Publish(); err != nil {
92 | log.Error(err.Error())
93 | }
94 |
95 | if args.EnableQueryMonitoring {
96 | queryperformancemonitoring.QueryPerformanceMain(args, pgIntegration, collectionList)
97 | }
98 |
99 | }
100 |
--------------------------------------------------------------------------------
/src/metrics/database_definitions_test.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/blang/semver/v4"
7 | "github.com/newrelic/nri-postgresql/src/collection"
8 | "github.com/stretchr/testify/assert"
9 | )
10 |
11 | func Test_generateDatabaseDefinitions_LengthV8(t *testing.T) {
12 | v8 := semver.MustParse("8.0.0")
13 |
14 | databaseList := collection.DatabaseList{"test1": {}}
15 |
16 | queryDefinitions := generateDatabaseDefinitions(databaseList, &v8)
17 |
18 | assert.Equal(t, 1, len(queryDefinitions))
19 | }
20 |
21 | func Test_generateDatabaseDefinitions_LengthV912(t *testing.T) {
22 | v912 := semver.MustParse("9.1.2")
23 | databaseList := collection.DatabaseList{"test1": {}}
24 |
25 | queryDefinitions := generateDatabaseDefinitions(databaseList, &v912)
26 |
27 | assert.Equal(t, 1, len(queryDefinitions))
28 | }
29 |
30 | func Test_generateDatabaseDefinitions_LengthV925(t *testing.T) {
31 | v925 := semver.MustParse("9.2.5")
32 | databaseList := collection.DatabaseList{"test1": {}}
33 |
34 | queryDefinitions := generateDatabaseDefinitions(databaseList, &v925)
35 |
36 | assert.Equal(t, 2, len(queryDefinitions))
37 | }
38 |
39 | func Test_insertDatabaseNames(t *testing.T) {
40 | t.Parallel()
41 |
42 | testDefinition := &QueryDefinition{
43 | query: `SELECT * FROM test WHERE database IN (%DATABASES%);`,
44 | dataModels: &[]struct{}{},
45 | }
46 |
47 | databaseList := collection.DatabaseList{"test1": {}, "test2": {}}
48 | td := testDefinition.insertDatabaseNames(databaseList)
49 |
50 | // The database names order is undetermined but the query is equivalent.
51 | assert.Contains(t,
52 | []string{
53 | `SELECT * FROM test WHERE database IN ('test1','test2');`,
54 | `SELECT * FROM test WHERE database IN ('test2','test1');`,
55 | },
56 | td.query,
57 | )
58 | }
59 |
--------------------------------------------------------------------------------
/src/metrics/index_definitions.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "github.com/newrelic/nri-postgresql/src/collection"
5 | )
6 |
7 | func generateIndexDefinitions(schemaList collection.SchemaList) []*QueryDefinition {
8 | queryDefinitions := make([]*QueryDefinition, 0)
9 | if def := indexDefinition.insertSchemaTableIndexes(schemaList); def != nil {
10 | queryDefinitions = append(queryDefinitions, def)
11 | }
12 |
13 | return queryDefinitions
14 | }
15 |
// indexDefinition reports per-index size and read/fetch activity. The
// %SCHEMA_TABLE_INDEXES% placeholder is filled by insertSchemaTableIndexes
// with a list of 'schema.table.index' triplets.
var indexDefinition = &QueryDefinition{
	query: `select -- INDEXQUERY
		current_database() as database,
		t.schemaname as schema_name,
		t.tablename as table_name,
		indexname as index_name,
		pg_relation_size(foo.indexoid) AS index_size,
		idx_tup_read AS tuples_read,
		idx_tup_fetch AS tuples_fetched
		FROM pg_tables t
		LEFT OUTER JOIN
		( SELECT c.relname AS ctablename, n.nspname AS cschemaname, x.indexrelid indexoid, ipg.relname AS indexname, x.indnatts AS number_of_columns, idx_scan, idx_tup_read, idx_tup_fetch, indexrelname, indisunique FROM pg_index x
			JOIN pg_class c ON c.oid = x.indrelid
			JOIN pg_namespace n ON c.relnamespace = n.oid
			JOIN pg_class ipg ON ipg.oid = x.indexrelid
			JOIN pg_stat_all_indexes psai ON x.indexrelid = psai.indexrelid
		)
		AS foo
		ON t.tablename = foo.ctablename AND t.schemaname = foo.cschemaname
		where indexname is not null and t.schemaname || '.' || t.tablename || '.' || indexname in (%SCHEMA_TABLE_INDEXES%)
		ORDER BY 1,2;`,

	// Rows scan into the embedded database/schema/table/index identifier
	// bases plus the three index metrics below.
	dataModels: []struct {
		databaseBase
		schemaBase
		tableBase
		indexBase
		IndexSize   *int64 `db:"index_size" metric_name:"index.sizeInBytes" source_type:"gauge"`
		RowsRead    *int64 `db:"tuples_read" metric_name:"index.rowsReadPerSecond" source_type:"rate"`
		RowsFetched *int64 `db:"tuples_fetched" metric_name:"index.rowsFetchedPerSecond" source_type:"rate"`
	}{},
}
48 |
--------------------------------------------------------------------------------
/src/metrics/instance_definitions_test.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/blang/semver/v4"
7 | "github.com/stretchr/testify/assert"
8 | )
9 |
10 | func Test_generateInstanceDefinitions(t *testing.T) {
11 | tests := []struct {
12 | name string
13 | version string
14 | expectedQueries []*QueryDefinition
15 | }{
16 | {
17 | name: "PostgreSQL 9.0",
18 | version: "9.0.0",
19 | expectedQueries: []*QueryDefinition{instanceDefinitionBase},
20 | },
21 | {
22 | name: "PostgreSQL 9.1",
23 | version: "9.1.0",
24 | expectedQueries: []*QueryDefinition{instanceDefinitionBase, instanceDefinition91},
25 | },
26 | {
27 | name: "PostgreSQL 9.2",
28 | version: "9.2.0",
29 | expectedQueries: []*QueryDefinition{instanceDefinitionBase, instanceDefinition91, instanceDefinition92},
30 | },
31 | {
32 | name: "PostgreSQL 10.2",
33 | version: "10.2.0",
34 | expectedQueries: []*QueryDefinition{instanceDefinitionBase, instanceDefinition91, instanceDefinition92},
35 | },
36 | {
37 | name: "PostgreSQL 16.4",
38 | version: "16.4.2",
39 | expectedQueries: []*QueryDefinition{instanceDefinitionBase, instanceDefinition91, instanceDefinition92},
40 | },
41 | {
42 | name: "PostgreSQL 17.0",
43 | version: "17.0.0",
44 | expectedQueries: []*QueryDefinition{instanceDefinitionBase170, instanceDefinition170, instanceDefinitionInputOutput170},
45 | },
46 | }
47 |
48 | for _, tt := range tests {
49 | t.Run(tt.name, func(t *testing.T) {
50 | version := semver.MustParse(tt.version)
51 | queryDefinitions := generateInstanceDefinitions(&version)
52 | assert.Equal(t, len(tt.expectedQueries), len(queryDefinitions))
53 | assert.Equal(t, tt.expectedQueries, queryDefinitions)
54 | })
55 | }
56 |
57 | }
58 |
// Test_generateInstanceDefinitionsOutOfOrder documents that the returned
// slice is ordered: comparing the PostgreSQL 17.x definitions against a
// reversed expectation must NOT match, so the assertion is negated.
func Test_generateInstanceDefinitionsOutOfOrder(t *testing.T) {
	t.Run("PostgreSQL 17.5 order check", func(t *testing.T) {
		version := semver.MustParse("17.5.0")
		queryDefinitions := generateInstanceDefinitions(&version)
		// Deliberately reversed relative to the order generateInstanceDefinitions produces.
		expectedQueries := []*QueryDefinition{instanceDefinitionInputOutput170, instanceDefinition170, instanceDefinitionBase170}

		// ObjectsAreEqual (not assert.Equal) is used so the mismatch itself is the expected outcome.
		assert.False(t, assert.ObjectsAreEqual(expectedQueries, queryDefinitions), "Query definitions should be in the correct order")
	})
}
69 |
--------------------------------------------------------------------------------
/src/metrics/lock_definitions.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "github.com/newrelic/nri-postgresql/src/collection"
5 | )
6 |
7 | func generateLockDefinitions(databases collection.DatabaseList) []*QueryDefinition {
8 | queryDefinitions := make([]*QueryDefinition, 0, 1)
9 | if len(databases) == 0 {
10 | return queryDefinitions
11 | }
12 |
13 | queryDefinitions = append(queryDefinitions, lockDefinitions.insertDatabaseNames(databases))
14 |
15 | return queryDefinitions
16 | }
17 |
// lockDefinitions reports per-database lock counts broken out by lock mode.
// It pivots pg_locks (joined to pg_stat_activity) with crosstab(), so the
// tablefunc extension is assumed to be installed in the public schema of the
// connected database. NOTE(review): that assumption is not checked here —
// confirm against the integration's setup docs. The %DATABASES% placeholder
// is filled in by insertDatabaseNames.
var lockDefinitions = &QueryDefinition{
	query: `SELECT -- LOCKS_DEFINITION
		database,
		COALESCE(access_exclusive_lock, 0) AS access_exclusive_lock,
		COALESCE(access_share_lock, 0) AS access_share_lock,
		COALESCE(exclusive_lock, 0) AS exclusive_lock,
		COALESCE(row_exclusive_lock, 0) AS row_exclusive_lock,
		COALESCE(row_share_lock, 0) AS row_share_lock,
		COALESCE(share_lock, 0) AS share_lock,
		COALESCE(share_row_exclusive_lock, 0) AS share_row_exclusive_lock,
		COALESCE(share_update_exclusive_lock, 0) AS share_update_exclusive_lock
		FROM public.crosstab(
			$$SELECT psa.datname AS database,
			lock.mode,
			count(lock.mode)
			FROM pg_locks AS lock
			LEFT JOIN pg_stat_activity AS psa ON lock.pid = psa.pid
			WHERE psa.datname IN (%DATABASES%)
			GROUP BY lock.database, lock.mode, psa.datname
			ORDER BY database,mode$$,
			$$VALUES ('AccessExclusiveLock'::text),
			('AccessShareLock'::text),
			('ExclusiveLock'::text),
			('RowExclusiveLock'::text),
			('RowShareLock'::text),
			('ShareLock'::text),
			('ShareRowExclusiveLock'::text),
			('ShareUpdateExclusiveLock'::text) $$
		) AS data (
			database text,
			access_exclusive_lock numeric,
			access_share_lock numeric,
			exclusive_lock numeric,
			row_exclusive_lock numeric,
			row_share_lock numeric,
			share_lock numeric,
			share_row_exclusive_lock numeric,
			share_update_exclusive_lock numeric
		);`,
	dataModels: []struct {
		databaseBase
		AccessExclusiveLock      *int64 `db:"access_exclusive_lock" metric_name:"db.locks.accessExclusiveLock" source_type:"gauge"`
		AccessShareLock          *int64 `db:"access_share_lock" metric_name:"db.locks.accessShareLock" source_type:"gauge"`
		ExclusiveLock            *int64 `db:"exclusive_lock" metric_name:"db.locks.exclusiveLock" source_type:"gauge"`
		RowExclusiveLock         *int64 `db:"row_exclusive_lock" metric_name:"db.locks.rowExclusiveLock" source_type:"gauge"`
		RowShareLock             *int64 `db:"row_share_lock" metric_name:"db.locks.rowShareLock" source_type:"gauge"`
		ShareLock                *int64 `db:"share_lock" metric_name:"db.locks.shareLock" source_type:"gauge"`
		ShareRowExclusiveLock    *int64 `db:"share_row_exclusive_lock" metric_name:"db.locks.shareRowExclusiveLock" source_type:"gauge"`
		ShareUpdateExclusiveLock *int64 `db:"share_update_exclusive_lock" metric_name:"db.locks.shareUpdateExclusiveLock" source_type:"gauge"`
	}{},
}
69 |
--------------------------------------------------------------------------------
/src/metrics/metric_types.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "fmt"
5 | "reflect"
6 | "strings"
7 |
8 | "github.com/newrelic/nri-postgresql/src/collection"
9 | )
10 |
// QueryDefinition holds the query and the unmarshall model
type QueryDefinition struct {
	query      string      // SQL text; may contain %DATABASES%, %SCHEMA_TABLES%, or %SCHEMA_TABLE_INDEXES% placeholders
	dataModels interface{} // empty slice of the row struct the query results unmarshal into
}
16 |
// GetQuery returns the SQL text of the QueryDefinition.
func (qd QueryDefinition) GetQuery() string {
	return qd.query
}
21 |
22 | // GetDataModels returns the data models of the QueryDefinition
23 | func (qd QueryDefinition) GetDataModels() interface{} {
24 | ptr := reflect.New(reflect.ValueOf(qd.dataModels).Type())
25 | return ptr.Interface()
26 | }
27 |
28 | func (qd QueryDefinition) insertDatabaseNames(databases collection.DatabaseList) *QueryDefinition {
29 | schemaDBs := make([]string, 0)
30 | for schemaDB := range databases {
31 | schemaDBs = append(schemaDBs, fmt.Sprintf("'%s'", schemaDB))
32 | }
33 |
34 | if len(schemaDBs) == 0 {
35 | return nil
36 | }
37 |
38 | schemaDBString := strings.Join(schemaDBs, ",")
39 |
40 | newDBDef := &QueryDefinition{
41 | dataModels: qd.dataModels,
42 | query: strings.Replace(qd.query, `%DATABASES%`, schemaDBString, 1),
43 | }
44 |
45 | return newDBDef
46 | }
47 |
48 | func (qd QueryDefinition) insertSchemaTables(schemaList collection.SchemaList) *QueryDefinition {
49 | schemaTables := make([]string, 0)
50 | for schema, tableList := range schemaList {
51 | for table := range tableList {
52 | schemaTables = append(schemaTables, fmt.Sprintf("'%s.%s'", schema, table))
53 | }
54 | }
55 |
56 | if len(schemaTables) == 0 {
57 | return nil
58 | }
59 |
60 | schemaTablesString := strings.Join(schemaTables, ",")
61 |
62 | newTableDef := &QueryDefinition{
63 | dataModels: qd.dataModels,
64 | query: strings.Replace(qd.query, `%SCHEMA_TABLES%`, schemaTablesString, 1),
65 | }
66 |
67 | return newTableDef
68 | }
69 |
70 | func (qd QueryDefinition) insertSchemaTableIndexes(schemaList collection.SchemaList) *QueryDefinition {
71 | schemaTableIndexes := make([]string, 0)
72 | for schema, tableList := range schemaList {
73 | for table, indexList := range tableList {
74 | for _, index := range indexList {
75 | schemaTableIndexes = append(schemaTableIndexes, fmt.Sprintf("'%s.%s.%s'", schema, table, index))
76 | }
77 | }
78 | }
79 |
80 | if len(schemaTableIndexes) == 0 {
81 | return nil
82 | }
83 |
84 | schemaTableIndexString := strings.Join(schemaTableIndexes, ",")
85 |
86 | newIndexDef := &QueryDefinition{
87 | dataModels: qd.dataModels,
88 | query: strings.Replace(qd.query, `%SCHEMA_TABLE_INDEXES%`, schemaTableIndexString, 1),
89 | }
90 |
91 | return newIndexDef
92 | }
93 |
--------------------------------------------------------------------------------
/src/metrics/modelers.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "errors"
5 | "reflect"
6 | )
7 |
// DatabaseModeler is an interface to represent something which has a database field
type DatabaseModeler interface {
	GetDatabaseName() (string, error)
}

// databaseBase is embedded in query data models to capture the "database"
// column returned by a query.
type databaseBase struct {
	Database *string `db:"database"`
}

// GetDatabaseName returns the database name for the object, or an error when
// the query did not return a database column (nil pointer).
func (d databaseBase) GetDatabaseName() (string, error) {
	if d.Database == nil {
		return "", errors.New("database name not returned")
	}
	return *d.Database, nil
}
24 |
25 | // GetDatabaseName returns the database name for the object
26 | func GetDatabaseName(dataModel interface{}) (string, error) {
27 | v := reflect.ValueOf(dataModel)
28 | modeler, ok := v.Interface().(DatabaseModeler)
29 | if !ok {
30 | return "", errors.New("data model does not implement DatabaseModeler interface")
31 | }
32 |
33 | name, err := modeler.GetDatabaseName()
34 | if err != nil {
35 | return "", err
36 | }
37 |
38 | return name, nil
39 | }
40 |
// SchemaModeler is an interface to represent something which has a schema field
type SchemaModeler interface {
	GetSchemaName() (string, error)
}

// schemaBase is embedded in query data models to capture the "schema_name"
// column returned by a query.
type schemaBase struct {
	Schema *string `db:"schema_name"`
}

// GetSchemaName returns a schema name, or an error when the query did not
// return a schema_name column (nil pointer).
func (d schemaBase) GetSchemaName() (string, error) {
	if d.Schema == nil {
		return "", errors.New("schema name not returned")
	}
	return *d.Schema, nil
}
57 |
58 | // GetSchemaName returns a schema name
59 | func GetSchemaName(dataModel interface{}) (string, error) {
60 | v := reflect.ValueOf(dataModel)
61 | modeler, ok := v.Interface().(SchemaModeler)
62 | if !ok {
63 | return "", errors.New("data model does not implement SchemaModeler interface")
64 | }
65 |
66 | name, err := modeler.GetSchemaName()
67 | if err != nil {
68 | return "", err
69 | }
70 |
71 | return name, nil
72 | }
73 |
// TableModeler is an interface to represent something which has a table field
type TableModeler interface {
	GetTableName() (string, error)
}

// tableBase is embedded in query data models to capture the "table_name"
// column returned by a query.
type tableBase struct {
	Table *string `db:"table_name"`
}

// GetTableName returns the table name, or an error when the query did not
// return a table_name column (nil pointer).
func (d tableBase) GetTableName() (string, error) {
	if d.Table == nil {
		return "", errors.New("table name not returned")
	}
	return *d.Table, nil
}
90 |
91 | // GetTableName returns the table name
92 | func GetTableName(dataModel interface{}) (string, error) {
93 | v := reflect.ValueOf(dataModel)
94 | modeler, ok := v.Interface().(TableModeler)
95 | if !ok {
96 | return "", errors.New("data model does not implement TableModeler interface")
97 | }
98 |
99 | name, err := modeler.GetTableName()
100 | if err != nil {
101 | return "", err
102 | }
103 |
104 | return name, nil
105 | }
106 |
// IndexModeler represents something with an index field
type IndexModeler interface {
	GetIndexName() (string, error)
}

// indexBase is embedded in query data models to capture the "index_name"
// column returned by a query.
type indexBase struct {
	Index *string `db:"index_name"`
}

// GetIndexName returns the index name, or an error when the query did not
// return an index_name column (nil pointer).
func (d indexBase) GetIndexName() (string, error) {
	if d.Index == nil {
		return "", errors.New("index name not returned")
	}
	return *d.Index, nil
}
123 |
124 | // GetIndexName returns the index name
125 | func GetIndexName(dataModel interface{}) (string, error) {
126 | v := reflect.ValueOf(dataModel)
127 | modeler, ok := v.Interface().(IndexModeler)
128 | if !ok {
129 | return "", errors.New("data model does not implement IndexModeler interface")
130 | }
131 |
132 | name, err := modeler.GetIndexName()
133 | if err != nil {
134 | return "", err
135 | }
136 |
137 | return name, nil
138 | }
139 |
--------------------------------------------------------------------------------
/src/metrics/pgbouncer_definitions.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | func generatePgBouncerDefinitions() []*QueryDefinition {
4 | queryDefinitions := make([]*QueryDefinition, 2)
5 | queryDefinitions[0] = pgbouncerStatsDefinition
6 | queryDefinitions[1] = pgbouncerPoolsDefinition
7 |
8 | return queryDefinitions
9 | }
10 |
// pgbouncerStatsDefinition maps the output of PgBouncer's `SHOW STATS`
// command. Column availability varies with the PgBouncer version (see the
// per-field notes). Note that AvgQueryTime and AvgQuery deliberately share
// the same metric name: NOTE(review) avg_query appears to be the pre-rename
// form of avg_query_time in older PgBouncer releases — confirm against the
// PgBouncer changelog.
var pgbouncerStatsDefinition = &QueryDefinition{
	query: `SHOW STATS;`,

	dataModels: []struct {
		databaseBase
		TotalXactCount             *int64 `db:"total_xact_count" metric_name:"pgbouncer.stats.transactionsPerSecond" source_type:"rate"`
		TotalQueryCount            *int64 `db:"total_query_count" metric_name:"pgbouncer.stats.queriesPerSecond" source_type:"rate"`
		TotalServerAssignmentCount *int64 `db:"total_server_assignment_count" metric_name:"pgbouncer.stats.totalServerAssignmentCount" source_type:"gauge"` // added in v1.23
		TotalReceived              *int64 `db:"total_received" metric_name:"pgbouncer.stats.bytesInPerSecond" source_type:"rate"`
		TotalSent                  *int64 `db:"total_sent" metric_name:"pgbouncer.stats.bytesOutPerSecond" source_type:"rate"`
		TotalXactTime              *int64 `db:"total_xact_time" metric_name:"pgbouncer.stats.totalTransactionDurationInMillisecondsPerSecond" source_type:"rate"`
		TotalQueryTime             *int64 `db:"total_query_time" metric_name:"pgbouncer.stats.totalQueryDurationInMillisecondsPerSecond" source_type:"rate"`
		TotalRequests              *int64 `db:"total_requests" metric_name:"pgbouncer.stats.requestsPerSecond" source_type:"rate"`
		TotalWaitTime              *int64 `db:"total_wait_time"` // scanned but not reported as a metric
		AvgXactCount               *int64 `db:"avg_xact_count" metric_name:"pgbouncer.stats.avgTransactionCount" source_type:"gauge"`
		AvgXactTime                *int64 `db:"avg_xact_time" metric_name:"pgbouncer.stats.avgTransactionDurationInMilliseconds" source_type:"gauge"`
		AvgQueryCount              *int64 `db:"avg_query_count" metric_name:"pgbouncer.stats.avgQueryCount" source_type:"gauge"`
		AvgServerAssignmentCount   *int64 `db:"avg_server_assignment_count" metric_name:"pgbouncer.stats.avgServerAssignmentCount" source_type:"gauge"` // added in v1.23
		AvgRecv                    *int64 `db:"avg_recv" metric_name:"pgbouncer.stats.avgBytesIn" source_type:"gauge"`
		AvgSent                    *int64 `db:"avg_sent" metric_name:"pgbouncer.stats.avgBytesOut" source_type:"gauge"`
		AvgReq                     *int64 `db:"avg_req" metric_name:"pgbouncer.stats.avgRequestsPerSecond" source_type:"gauge"`
		AvgQueryTime               *int64 `db:"avg_query_time" metric_name:"pgbouncer.stats.avgQueryDurationInMilliseconds" source_type:"gauge"`
		AvgQuery                   *int64 `db:"avg_query" metric_name:"pgbouncer.stats.avgQueryDurationInMilliseconds" source_type:"gauge"`
		AvgWaitTime                *int64 `db:"avg_wait_time"` // scanned but not reported as a metric
	}{},
}
37 |
// pgbouncerPoolsDefinition maps the output of PgBouncer's `SHOW POOLS`
// command. Column availability varies with the PgBouncer version (see the
// per-field notes).
var pgbouncerPoolsDefinition = &QueryDefinition{
	query: `SHOW POOLS;`,

	dataModels: []struct {
		databaseBase
		User               *string `db:"user" metric_name:"pgbouncer.pools.user" source_type:"attribute"`
		ClCancelReq        *int64  `db:"cl_cancel_req" metric_name:"pgbouncer.pools.clientConnectionsCancelReq" source_type:"gauge"` // removed in v1.18
		ClActive           *int64  `db:"cl_active" metric_name:"pgbouncer.pools.clientConnectionsActive" source_type:"gauge"`
		ClWaiting          *int64  `db:"cl_waiting" metric_name:"pgbouncer.pools.clientConnectionsWaiting" source_type:"gauge"`
		ClWaitingCancelReq *int64  `db:"cl_waiting_cancel_req" metric_name:"pgbouncer.pools.clientConnectionsWaitingCancelReq" source_type:"gauge"` // added in v1.18
		ClActiveCancelReq  *int64  `db:"cl_active_cancel_req" metric_name:"pgbouncer.pools.clientConnectionsActiveCancelReq" source_type:"gauge"`   // added in v1.18
		SvActiveCancel     *int64  `db:"sv_active_cancel" metric_name:"pgbouncer.pools.serverConnectionsActiveCancel" source_type:"gauge"`          // added in v1.18
		SvBeingCancel      *int64  `db:"sv_being_canceled" metric_name:"pgbouncer.pools.serverConnectionsBeingCancel" source_type:"gauge"`          // added in v1.18
		SvActive           *int64  `db:"sv_active" metric_name:"pgbouncer.pools.serverConnectionsActive" source_type:"gauge"`
		SvIdle             *int64  `db:"sv_idle" metric_name:"pgbouncer.pools.serverConnectionsIdle" source_type:"gauge"`
		SvUsed             *int64  `db:"sv_used" metric_name:"pgbouncer.pools.serverConnectionsUsed" source_type:"gauge"`
		SvTested           *int64  `db:"sv_tested" metric_name:"pgbouncer.pools.serverConnectionsTested" source_type:"gauge"`
		SvLogin            *int64  `db:"sv_login" metric_name:"pgbouncer.pools.serverConnectionsLogin" source_type:"gauge"`
		MaxWait            *int64  `db:"maxwait" metric_name:"pgbouncer.pools.maxwaitInMilliseconds" source_type:"gauge"`
		MaxWaitUs          *int64  `db:"maxwait_us"` // scanned but not reported as a metric
		PoolMode           *string `db:"pool_mode"`  // scanned but not reported as a metric
	}{},
}
61 |
--------------------------------------------------------------------------------
/src/metrics/version_test.go:
--------------------------------------------------------------------------------
1 | package metrics
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/blang/semver/v4"
7 | "github.com/newrelic/nri-postgresql/src/connection"
8 | "github.com/stretchr/testify/assert"
9 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
10 | )
11 |
12 | func Test_collectVersion(t *testing.T) {
13 | testConnection, mock := connection.CreateMockSQL(t)
14 |
15 | versionRows := sqlmock.NewRows([]string{"server_version"}).
16 | AddRow("10.3")
17 |
18 | mock.ExpectQuery(versionQuery).WillReturnRows(versionRows)
19 |
20 | expected := &semver.Version{
21 | Major: 10,
22 | Minor: 3,
23 | }
24 |
25 | version, err := CollectVersion(testConnection)
26 |
27 | assert.Nil(t, err)
28 | assert.Equal(t, expected, version)
29 | }
30 |
31 | func Test_collectVersion_EnterpriseDB(t *testing.T) {
32 | testConnection, mock := connection.CreateMockSQL(t)
33 |
34 | versionRows := sqlmock.NewRows([]string{"server_version"}).
35 | AddRow("9.6.7.13")
36 |
37 | mock.ExpectQuery(versionQuery).WillReturnRows(versionRows)
38 |
39 | expected := &semver.Version{
40 | Major: 9,
41 | Minor: 6,
42 | Patch: 7,
43 | }
44 |
45 | version, err := CollectVersion(testConnection)
46 |
47 | assert.Nil(t, err)
48 | assert.Equal(t, expected, version)
49 | }
50 |
51 | func Test_collectVersion_Ubuntu(t *testing.T) {
52 | testConnection, mock := connection.CreateMockSQL(t)
53 |
54 | versionRows := sqlmock.NewRows([]string{"server_version"}).
55 | AddRow("10.4 (Ubuntu 10.4-2.pgdg16.04+1)")
56 |
57 | mock.ExpectQuery(versionQuery).WillReturnRows(versionRows)
58 |
59 | expected := &semver.Version{
60 | Major: 10,
61 | Minor: 4,
62 | }
63 |
64 | version, err := CollectVersion(testConnection)
65 |
66 | assert.Nil(t, err)
67 | assert.Equal(t, expected, version)
68 | }
69 |
70 | func Test_collectVersion_Debian(t *testing.T) {
71 | testConnection, mock := connection.CreateMockSQL(t)
72 |
73 | versionRows := sqlmock.NewRows([]string{"server_version"}).
74 | AddRow("10.4 (Debian 10.4-2.pgdg16.04+1)")
75 |
76 | mock.ExpectQuery(versionQuery).WillReturnRows(versionRows)
77 |
78 | expected := &semver.Version{
79 | Major: 10,
80 | Minor: 4,
81 | }
82 |
83 | version, err := CollectVersion(testConnection)
84 |
85 | assert.Nil(t, err)
86 | assert.Equal(t, expected, version)
87 | }
88 |
89 | func Test_collectVersion_Err(t *testing.T) {
90 | testConnection, mock := connection.CreateMockSQL(t)
91 |
92 | versionRows := sqlmock.NewRows([]string{"server_version"}).
93 | AddRow("invalid.version.number")
94 |
95 | mock.ExpectQuery(versionQuery).WillReturnRows(versionRows)
96 |
97 | _, err := CollectVersion(testConnection)
98 |
99 | assert.NotNil(t, err)
100 | }
101 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-parameters/common_parameters.go:
--------------------------------------------------------------------------------
1 | package commonparameters
2 |
3 | import (
4 | "github.com/newrelic/infra-integrations-sdk/v3/log"
5 | "github.com/newrelic/nri-postgresql/src/args"
6 | )
7 |
8 | // The maximum number records that can be fetched in a single metrics
9 | const MaxQueryCountThreshold = 30
10 |
11 | // DefaultQueryMonitoringCountThreshold is the default threshold for the number of queries to monitor.
12 | const DefaultQueryMonitoringCountThreshold = 20
13 |
14 | // DefaultQueryResponseTimeThreshold is the default threshold for the response time of a query.
15 | const DefaultQueryResponseTimeThreshold = 500
16 |
// CommonParameters carries the connection details and validated thresholds
// shared by the query-performance-monitoring collectors.
type CommonParameters struct {
	Version                              uint64 // PostgreSQL major version
	Databases                            string // comma-separated database names
	QueryMonitoringCountThreshold        int    // capped at MaxQueryCountThreshold
	QueryMonitoringResponseTimeThreshold int    // response-time threshold; presumably milliseconds — TODO confirm with callers
	Host                                 string
	Port                                 string
}
25 |
// SetCommonParameters assembles the CommonParameters shared by the
// query-performance collectors, validating the query-monitoring thresholds
// against their defaults and the maximum limit.
func SetCommonParameters(args args.ArgumentList, version uint64, databases string) *CommonParameters {
	return &CommonParameters{
		Version:                              version,
		Databases:                            databases, // comma separated database names
		QueryMonitoringCountThreshold:        validateAndGetQueryMonitoringCountThreshold(args),
		QueryMonitoringResponseTimeThreshold: validateAndGetQueryMonitoringResponseTimeThreshold(args),
		Host:                                 args.Hostname,
		Port:                                 args.Port,
	}
}
36 |
37 | func validateAndGetQueryMonitoringResponseTimeThreshold(args args.ArgumentList) int {
38 | if args.QueryMonitoringResponseTimeThreshold < 0 {
39 | log.Warn("QueryResponseTimeThreshold should be greater than or equal to 0 but the input is %d, setting value to default which is %d", args.QueryMonitoringResponseTimeThreshold, DefaultQueryResponseTimeThreshold)
40 | return DefaultQueryResponseTimeThreshold
41 | }
42 | return args.QueryMonitoringResponseTimeThreshold
43 | }
44 |
45 | func validateAndGetQueryMonitoringCountThreshold(args args.ArgumentList) int {
46 | if args.QueryMonitoringCountThreshold < 0 {
47 | log.Warn("QueryCountThreshold should be greater than 0 but the input is %d, setting value to default which is %d", args.QueryMonitoringCountThreshold, DefaultQueryMonitoringCountThreshold)
48 | return DefaultQueryMonitoringCountThreshold
49 | }
50 | if args.QueryMonitoringCountThreshold > MaxQueryCountThreshold {
51 | log.Warn("QueryCountThreshold should be less than or equal to max limit but the input is %d, setting value to max limit which is %d", args.QueryMonitoringCountThreshold, MaxQueryCountThreshold)
52 | return MaxQueryCountThreshold
53 | }
54 | return args.QueryMonitoringCountThreshold
55 | }
56 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/common_helpers.go:
--------------------------------------------------------------------------------
1 | package commonutils
2 |
3 | import (
4 | "crypto/rand"
5 | "fmt"
6 | "math/big"
7 | "regexp"
8 | "strings"
9 | "time"
10 |
11 | "github.com/newrelic/nri-postgresql/src/collection"
12 | )
13 |
// re matches the literal values that get anonymized in query text:
// single-quoted strings, bare digit runs, or double-quoted strings.
var re = regexp.MustCompile(`'[^']*'|\d+|".*?"`)
16 |
17 | func GetDatabaseListInString(dbMap collection.DatabaseList) string {
18 | if len(dbMap) == 0 {
19 | return ""
20 | }
21 | var quotedNames = make([]string, 0)
22 | for dbName := range dbMap {
23 | quotedNames = append(quotedNames, fmt.Sprintf("'%s'", dbName))
24 | }
25 | return strings.Join(quotedNames, ",")
26 | }
27 |
28 | func AnonymizeQueryText(query string) string {
29 | anonymizedQuery := re.ReplaceAllString(query, "?")
30 | return anonymizedQuery
31 | }
32 |
33 | // This function is used to generate a unique plan ID for a query
34 | func GeneratePlanID() (string, error) {
35 | randomInt, err := rand.Int(rand.Reader, big.NewInt(RandomIntRange))
36 | if err != nil {
37 | return "", ErrUnExpectedError
38 | }
39 | currentTime := time.Now().Format(TimeFormat)
40 | result := fmt.Sprintf("%d-%s", randomInt.Int64(), currentTime)
41 | return result, nil
42 | }
43 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/common_helpers_test.go:
--------------------------------------------------------------------------------
1 | package commonutils
2 |
3 | import (
4 | "sort"
5 | "testing"
6 |
7 | "github.com/newrelic/nri-postgresql/src/collection"
8 | "github.com/stretchr/testify/assert"
9 | )
10 |
11 | func TestGetDatabaseListInString(t *testing.T) {
12 | dbListKeys := []string{"db1"}
13 | sort.Strings(dbListKeys) // Sort the keys to ensure consistent order
14 | dbList := collection.DatabaseList{}
15 | for _, key := range dbListKeys {
16 | dbList[key] = collection.SchemaList{}
17 | }
18 | expected := "'db1'"
19 | result := GetDatabaseListInString(dbList)
20 | assert.Equal(t, expected, result)
21 |
22 | // Test with empty database list
23 | dbList = collection.DatabaseList{}
24 | expected = ""
25 | result = GetDatabaseListInString(dbList)
26 | assert.Equal(t, expected, result)
27 | }
28 |
// TestAnonymizeQueryText checks that string and numeric literals are replaced
// with "?" across a simple query and one using many comparison operators.
func TestAnonymizeQueryText(t *testing.T) {
	query := "SELECT * FROM users WHERE id = 1 AND name = 'John'"
	expected := "SELECT * FROM users WHERE id = ? AND name = ?"
	result := AnonymizeQueryText(query)
	assert.Equal(t, expected, result)
	query = "SELECT * FROM employees WHERE id = 10 OR name <> 'John Doe' OR name != 'John Doe' OR age < 30 OR age <= 30 OR salary > 50000OR salary >= 50000 OR department LIKE 'Sales%' OR department ILIKE 'sales%'OR join_date BETWEEN '2023-01-01' AND '2023-12-31' OR department IN ('HR', 'Engineering', 'Marketing') OR department IS NOT NULL OR department IS NULL;"
	expected = "SELECT * FROM employees WHERE id = ? OR name <> ? OR name != ? OR age < ? OR age <= ? OR salary > ?OR salary >= ? OR department LIKE ? OR department ILIKE ?OR join_date BETWEEN ? AND ? OR department IN (?, ?, ?) OR department IS NOT NULL OR department IS NULL;"
	result = AnonymizeQueryText(query)
	assert.Equal(t, expected, result)
}
39 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/constants.go:
--------------------------------------------------------------------------------
1 | package commonutils
2 |
3 | import "errors"
4 |
// The maximum number of metrics to be published in a single batch
const PublishThreshold = 600

// RandomIntRange bounds the random component used in generated plan IDs.
const RandomIntRange = 1000000

// TimeFormat is the timestamp layout (yyyymmddhhmmss) used in generated plan IDs.
const TimeFormat = "20060102150405"

// The maximum number of individual queries that can be fetched in a single metrics, the value was chosen as the queries samples were with same query statements but with different parameters so 10 samples would be enough to check the execution plan
const MaxIndividualQueryCountThreshold = 10

// ErrUnsupportedVersion indicates the PostgreSQL version has no matching query.
var ErrUnsupportedVersion = errors.New("unsupported PostgreSQL version")

// ErrUnExpectedError is a generic sentinel for unexpected internal failures.
var ErrUnExpectedError = errors.New("unexpected error")

// ErrInvalidModelType indicates a data model that is not a struct (or pointer to one).
var ErrInvalidModelType = errors.New("invalid model type")

// ErrNotEligible indicates the instance is not eligible for metric collection.
var ErrNotEligible = errors.New("not Eligible to fetch metrics")

// PostgreSQL major-version markers used for version-gated query selection.
const PostgresVersion12 = 12
const PostgresVersion11 = 11
const PostgresVersion13 = 13
const PostgresVersion14 = 14
23 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/ingestion-helpers.go:
--------------------------------------------------------------------------------
1 | package commonutils
2 |
3 | import (
4 | "fmt"
5 | "reflect"
6 |
7 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
8 |
9 | "github.com/newrelic/infra-integrations-sdk/v3/data/metric"
10 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
11 | "github.com/newrelic/infra-integrations-sdk/v3/log"
12 | )
13 |
14 | func SetMetric(metricSet *metric.Set, name string, value interface{}, sourceType string) {
15 | switch sourceType {
16 | case `gauge`:
17 | err := metricSet.SetMetric(name, value, metric.GAUGE)
18 | if err != nil {
19 | log.Error("Error setting metric: %v", err)
20 | return
21 | }
22 | case `attribute`:
23 | err := metricSet.SetMetric(name, value, metric.ATTRIBUTE)
24 | if err != nil {
25 | log.Error("Error setting metric: %v", err)
26 | return
27 | }
28 | default:
29 | err := metricSet.SetMetric(name, value, metric.GAUGE)
30 | if err != nil {
31 | log.Error("Error setting metric: %v", err)
32 | return
33 | }
34 | }
35 | }
36 |
// IngestMetric publishes metricList as metric sets named eventName on the
// pg-instance entity, flushing in batches of at most PublishThreshold sets
// to avoid oversized New Relic payloads.
func IngestMetric(metricList []interface{}, eventName string, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters) error {
	instanceEntity, err := CreateEntity(pgIntegration, cp)
	if err != nil {
		log.Error("Error creating entity: %v", err)
		return err
	}

	metricCount := 0

	for _, model := range metricList {
		// Skip nil placeholders without counting them toward the batch.
		if model == nil {
			continue
		}
		metricCount += 1
		metricSet := instanceEntity.NewMetricSet(eventName)

		// NOTE(review): if ProcessModel fails, the metric set created above is
		// already attached to the entity and will still be published (empty) —
		// confirm whether that is intended.
		processErr := ProcessModel(model, metricSet)
		if processErr != nil {
			log.Error("Error processing model: %v", processErr)
			continue
		}

		// Flush a full batch; PublishMetrics swaps in a fresh entity for the
		// next batch via the double pointer.
		if metricCount == PublishThreshold {
			metricCount = 0
			if err := PublishMetrics(pgIntegration, &instanceEntity, cp); err != nil {
				log.Error("Error publishing metrics: %v", err)
				return err
			}
		}
	}
	// Flush any remaining partial batch.
	if metricCount > 0 {
		if err := PublishMetrics(pgIntegration, &instanceEntity, cp); err != nil {
			log.Error("Error publishing metrics: %v", err)
			return err
		}
	}
	return nil
}
76 |
// CreateEntity returns the integration entity representing this PostgreSQL
// instance, named "host:port" with entity type "pg-instance".
func CreateEntity(pgIntegration *integration.Integration, cp *commonparameters.CommonParameters) (*integration.Entity, error) {
	return pgIntegration.Entity(fmt.Sprintf("%s:%s", cp.Host, cp.Port), "pg-instance")
}
80 |
81 | func ProcessModel(model interface{}, metricSet *metric.Set) error {
82 | modelValue := reflect.ValueOf(model)
83 | if modelValue.Kind() == reflect.Ptr {
84 | modelValue = modelValue.Elem()
85 | }
86 | if !modelValue.IsValid() || modelValue.Kind() != reflect.Struct {
87 | log.Error("Invalid model type: %v", modelValue.Kind())
88 | return ErrInvalidModelType
89 | }
90 |
91 | modelType := reflect.TypeOf(model)
92 |
93 | for i := 0; i < modelValue.NumField(); i++ {
94 | field := modelValue.Field(i)
95 | fieldType := modelType.Field(i)
96 | metricName := fieldType.Tag.Get("metric_name")
97 | sourceType := fieldType.Tag.Get("source_type")
98 | ingestData := fieldType.Tag.Get("ingest_data")
99 |
100 | if ingestData == "false" {
101 | continue
102 | }
103 |
104 | if field.Kind() == reflect.Ptr && !field.IsNil() {
105 | SetMetric(metricSet, metricName, field.Elem().Interface(), sourceType)
106 | } else if field.Kind() != reflect.Ptr {
107 | SetMetric(metricSet, metricName, field.Interface(), sourceType)
108 | }
109 | }
110 | return nil
111 | }
112 |
// PublishMetrics publishes all accumulated data on pgIntegration and then
// replaces *instanceEntity with a freshly created entity, so callers can keep
// attaching metric sets after the flush. The double pointer lets the caller's
// entity variable be swapped in place.
func PublishMetrics(pgIntegration *integration.Integration, instanceEntity **integration.Entity, cp *commonparameters.CommonParameters) error {
	if err := pgIntegration.Publish(); err != nil {
		return err
	}
	var err error
	*instanceEntity, err = CreateEntity(pgIntegration, cp)
	return err
}
121 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/ingestion_helper_test.go:
--------------------------------------------------------------------------------
1 | package commonutils_test
2 |
3 | import (
4 | "testing"
5 |
6 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
7 |
8 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
9 | "github.com/newrelic/nri-postgresql/src/args"
10 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
11 | "github.com/stretchr/testify/assert"
12 | )
13 |
14 | func TestSetMetric(t *testing.T) {
15 | pgIntegration, _ := integration.New("test", "1.0.0")
16 | entity, _ := pgIntegration.Entity("test-entity", "test-type")
17 | metricSet := entity.NewMetricSet("test-event")
18 | commonutils.SetMetric(metricSet, "testGauge", 123.0, "gauge")
19 | assert.Equal(t, 123.0, metricSet.Metrics["testGauge"])
20 | commonutils.SetMetric(metricSet, "testAttribute", "value", "attribute")
21 | assert.Equal(t, "value", metricSet.Metrics["testAttribute"])
22 | commonutils.SetMetric(metricSet, "testDefault", 456.0, "unknown")
23 | assert.Equal(t, 456.0, metricSet.Metrics["testDefault"])
24 | }
25 |
26 | func TestIngestMetric(t *testing.T) {
27 | pgIntegration, _ := integration.New("test", "1.0.0")
28 | args := args.ArgumentList{
29 | Hostname: "localhost",
30 | Port: "5432",
31 | }
32 | cp := common_parameters.SetCommonParameters(args, uint64(14), "testdb")
33 | metricList := []interface{}{
34 | struct {
35 | TestField int `metric_name:"testField" source_type:"gauge"`
36 | }{TestField: 123},
37 | }
38 | err := commonutils.IngestMetric(metricList, "testEvent", pgIntegration, cp)
39 | if err != nil {
40 | t.Error(err)
41 | return
42 | }
43 | assert.NotEmpty(t, pgIntegration.Entities)
44 | }
45 |
46 | func TestCreateEntity(t *testing.T) {
47 | pgIntegration, _ := integration.New("test", "1.0.0")
48 | args := args.ArgumentList{
49 | Hostname: "localhost",
50 | Port: "5432",
51 | }
52 | cp := common_parameters.SetCommonParameters(args, uint64(14), "testdb")
53 |
54 | entity, err := commonutils.CreateEntity(pgIntegration, cp)
55 | assert.NoError(t, err)
56 | assert.NotNil(t, entity)
57 | assert.Equal(t, "localhost:5432", entity.Metadata.Name)
58 | }
59 |
60 | func TestProcessModel(t *testing.T) {
61 | pgIntegration, _ := integration.New("test", "1.0.0")
62 | entity, _ := pgIntegration.Entity("test-entity", "test-type")
63 |
64 | metricSet := entity.NewMetricSet("test-event")
65 |
66 | model := struct {
67 | TestField int `metric_name:"testField" source_type:"gauge"`
68 | }{TestField: 123}
69 |
70 | err := commonutils.ProcessModel(model, metricSet)
71 | assert.NoError(t, err)
72 | assert.Equal(t, 123.0, metricSet.Metrics["testField"])
73 | }
74 |
75 | func TestPublishMetrics(t *testing.T) {
76 | pgIntegration, _ := integration.New("test", "1.0.0")
77 | args := args.ArgumentList{
78 | Hostname: "localhost",
79 | Port: "5432",
80 | }
81 | cp := common_parameters.SetCommonParameters(args, uint64(14), "testdb")
82 | entity, _ := commonutils.CreateEntity(pgIntegration, cp)
83 |
84 | err := commonutils.PublishMetrics(pgIntegration, &entity, cp)
85 | assert.NoError(t, err)
86 | assert.NotNil(t, entity)
87 | }
88 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/query_fetch_helpers.go:
--------------------------------------------------------------------------------
1 | package commonutils
2 |
3 | import (
4 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
5 | )
6 |
7 | func FetchVersionSpecificSlowQueries(version uint64) (string, error) {
8 | switch {
9 | case version == PostgresVersion12:
10 | return queries.SlowQueriesForV12, nil
11 | case version >= PostgresVersion13:
12 | return queries.SlowQueriesForV13AndAbove, nil
13 | default:
14 | return "", ErrUnsupportedVersion
15 | }
16 | }
17 |
18 | func FetchVersionSpecificBlockingQueries(version uint64) (string, error) {
19 | switch {
20 | case version == PostgresVersion12, version == PostgresVersion13:
21 | return queries.BlockingQueriesForV12AndV13, nil
22 | case version >= PostgresVersion14:
23 | return queries.BlockingQueriesForV14AndAbove, nil
24 | default:
25 | return "", ErrUnsupportedVersion
26 | }
27 | }
28 |
29 | func FetchVersionSpecificIndividualQueries(version uint64) (string, error) {
30 | switch {
31 | case version == PostgresVersion12:
32 | return queries.IndividualQuerySearchV12, nil
33 | case version > PostgresVersion12:
34 | return queries.IndividualQuerySearchV13AndAbove, nil
35 | default:
36 | return "", ErrUnsupportedVersion
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/common-utils/query_fetch_helpers_test.go:
--------------------------------------------------------------------------------
1 | package commonutils_test
2 |
3 | import (
4 | "testing"
5 |
6 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
7 |
8 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
9 | "github.com/stretchr/testify/assert"
10 | )
11 |
12 | func runTestCases(t *testing.T, tests []struct {
13 | version uint64
14 | expected string
15 | expectErr bool
16 | }, fetchFunc func(uint64) (string, error)) {
17 | for _, test := range tests {
18 | result, err := fetchFunc(test.version)
19 | if test.expectErr {
20 | assert.Error(t, err)
21 | } else {
22 | assert.NoError(t, err)
23 | assert.Equal(t, test.expected, result)
24 | }
25 | }
26 | }
27 |
28 | func TestFetchVersionSpecificSlowQueries(t *testing.T) {
29 | tests := []struct {
30 | version uint64
31 | expected string
32 | expectErr bool
33 | }{
34 | {commonutils.PostgresVersion12, queries.SlowQueriesForV12, false},
35 | {commonutils.PostgresVersion13, queries.SlowQueriesForV13AndAbove, false},
36 | {commonutils.PostgresVersion11, "", true},
37 | }
38 |
39 | runTestCases(t, tests, commonutils.FetchVersionSpecificSlowQueries)
40 | }
41 |
42 | func TestFetchVersionSpecificBlockingQueries(t *testing.T) {
43 | tests := []struct {
44 | version uint64
45 | expected string
46 | expectErr bool
47 | }{
48 | {commonutils.PostgresVersion12, queries.BlockingQueriesForV12AndV13, false},
49 | {commonutils.PostgresVersion13, queries.BlockingQueriesForV12AndV13, false},
50 | {commonutils.PostgresVersion14, queries.BlockingQueriesForV14AndAbove, false},
51 | {commonutils.PostgresVersion11, "", true},
52 | }
53 |
54 | runTestCases(t, tests, commonutils.FetchVersionSpecificBlockingQueries)
55 | }
56 |
57 | func TestFetchVersionSpecificIndividualQueries(t *testing.T) {
58 | tests := []struct {
59 | version uint64
60 | expected string
61 | expectErr bool
62 | }{
63 | {commonutils.PostgresVersion12, queries.IndividualQuerySearchV12, false},
64 | {commonutils.PostgresVersion13, queries.IndividualQuerySearchV13AndAbove, false},
65 | {commonutils.PostgresVersion14, queries.IndividualQuerySearchV13AndAbove, false},
66 | {commonutils.PostgresVersion11, "", true},
67 | }
68 |
69 | runTestCases(t, tests, commonutils.FetchVersionSpecificIndividualQueries)
70 | }
71 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/blocking_sessions.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 |
6 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
7 |
8 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
9 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
10 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations"
11 |
12 | "github.com/newrelic/infra-integrations-sdk/v3/log"
13 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
14 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
15 | )
16 |
17 | func PopulateBlockingMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters, enabledExtensions map[string]bool) {
18 | isEligible, enableCheckError := validations.CheckBlockingSessionMetricsFetchEligibility(enabledExtensions, cp.Version)
19 | if enableCheckError != nil {
20 | log.Error("Error executing query: %v in PopulateBlockingMetrics", enableCheckError)
21 | return
22 | }
23 | if !isEligible {
24 | log.Debug("Extension 'pg_stat_statements' is not enabled or unsupported version.")
25 | return
26 | }
27 | blockingQueriesMetricsList, blockQueryFetchErr := getBlockingMetrics(conn, cp)
28 | if blockQueryFetchErr != nil {
29 | log.Error("Error fetching Blocking queries: %v", blockQueryFetchErr)
30 | return
31 | }
32 | if len(blockingQueriesMetricsList) == 0 {
33 | log.Debug("No Blocking queries found.")
34 | return
35 | }
36 | err := commonutils.IngestMetric(blockingQueriesMetricsList, "PostgresBlockingSessions", pgIntegration, cp)
37 | if err != nil {
38 | log.Error("Error ingesting Blocking queries: %v", err)
39 | return
40 | }
41 | }
42 |
43 | func getBlockingMetrics(conn *performancedbconnection.PGSQLConnection, cp *commonparameters.CommonParameters) ([]interface{}, error) {
44 | var blockingQueriesMetricsList []interface{}
45 | versionSpecificBlockingQuery, err := commonutils.FetchVersionSpecificBlockingQueries(cp.Version)
46 | if err != nil {
47 | log.Error("Unsupported postgres version: %v", err)
48 | return nil, err
49 | }
50 | var query = fmt.Sprintf(versionSpecificBlockingQuery, cp.Databases, cp.QueryMonitoringCountThreshold)
51 | rows, err := conn.Queryx(query)
52 | if err != nil {
53 | log.Error("Failed to execute query: %v", err)
54 | return nil, commonutils.ErrUnExpectedError
55 | }
56 | defer rows.Close()
57 | for rows.Next() {
58 | var blockingQueryMetric datamodels.BlockingSessionMetrics
59 | if scanError := rows.StructScan(&blockingQueryMetric); scanError != nil {
60 | return nil, scanError
61 | }
62 | // For PostgreSQL versions 13 and 12, anonymization of queries does not occur for blocking sessions, so it's necessary to explicitly anonymize them.
63 | if cp.Version == commonutils.PostgresVersion13 || cp.Version == commonutils.PostgresVersion12 {
64 | *blockingQueryMetric.BlockedQuery = commonutils.AnonymizeQueryText(*blockingQueryMetric.BlockedQuery)
65 | *blockingQueryMetric.BlockingQuery = commonutils.AnonymizeQueryText(*blockingQueryMetric.BlockingQuery)
66 | }
67 | blockingQueriesMetricsList = append(blockingQueriesMetricsList, blockingQueryMetric)
68 | }
69 |
70 | return blockingQueriesMetricsList, nil
71 | }
72 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/blocking_sessions_test.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "database/sql/driver"
5 | "fmt"
6 | "regexp"
7 | "testing"
8 |
9 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
10 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
11 |
12 | "github.com/newrelic/nri-postgresql/src/args"
13 | "github.com/newrelic/nri-postgresql/src/connection"
14 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
15 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
16 | "github.com/stretchr/testify/assert"
17 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
18 | )
19 |
20 | func TestGetBlockingMetrics(t *testing.T) {
21 | conn, mock := connection.CreateMockSQL(t)
22 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
23 | databaseName := "testdb"
24 | version := uint64(13)
25 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
26 | expectedQuery := queries.BlockingQueriesForV12AndV13
27 | query := fmt.Sprintf(expectedQuery, databaseName, args.QueryMonitoringCountThreshold)
28 | rowData := []driver.Value{
29 | "newrelic_value", int64(123), "SELECT 1", "1233444", "2023-01-01 00:00:00", "testdb",
30 | int64(456), "SELECT 2", "4566", "2023-01-01 00:00:00",
31 | }
32 | expectedRows := [][]driver.Value{
33 | rowData, rowData,
34 | }
35 | mockRows := sqlmock.NewRows([]string{
36 | "newrelic", "blocked_pid", "blocked_query", "blocked_query_id", "blocked_query_start", "database_name",
37 | "blocking_pid", "blocking_query", "blocking_query_id", "blocking_query_start",
38 | }).AddRow(rowData...).AddRow(rowData...)
39 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(mockRows)
40 | blockingQueriesMetricsList, err := getBlockingMetrics(conn, cp)
41 | compareMockRowsWithMetrics(t, expectedRows, blockingQueriesMetricsList)
42 | assert.NoError(t, err)
43 | assert.Len(t, blockingQueriesMetricsList, 2)
44 | assert.NoError(t, mock.ExpectationsWereMet())
45 | }
46 |
47 | func compareMockRowsWithMetrics(t *testing.T, expectedRows [][]driver.Value, blockingQueriesMetricsList []interface{}) {
48 | assert.Equal(t, 2, len(blockingQueriesMetricsList))
49 | for index := range blockingQueriesMetricsList {
50 | anonymizeQuery := commonutils.AnonymizeQueryText(expectedRows[index][2].(string))
51 | blockingSession := blockingQueriesMetricsList[index].(datamodels.BlockingSessionMetrics)
52 | assert.Equal(t, expectedRows[index][0], *blockingSession.Newrelic)
53 | assert.Equal(t, expectedRows[index][1], *blockingSession.BlockedPid)
54 | assert.Equal(t, anonymizeQuery, *blockingSession.BlockedQuery)
55 | assert.Equal(t, expectedRows[index][3], *blockingSession.BlockedQueryID)
56 | assert.Equal(t, expectedRows[index][4], *blockingSession.BlockedQueryStart)
57 | assert.Equal(t, expectedRows[index][5], *blockingSession.BlockedDatabase)
58 | assert.Equal(t, expectedRows[index][6], *blockingSession.BlockingPid)
59 | assert.Equal(t, anonymizeQuery, *blockingSession.BlockingQuery)
60 | assert.Equal(t, expectedRows[index][8], *blockingSession.BlockingQueryID)
61 | assert.Equal(t, expectedRows[index][9], *blockingSession.BlockingQueryStart)
62 | }
63 | }
64 |
65 | func TestGetBlockingMetricsErr(t *testing.T) {
66 | conn, mock := connection.CreateMockSQL(t)
67 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
68 | databaseName := "testdb"
69 | version := uint64(13)
70 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
71 | _, err := getBlockingMetrics(conn, cp)
72 | assert.EqualError(t, err, commonutils.ErrUnExpectedError.Error())
73 | assert.NoError(t, mock.ExpectationsWereMet())
74 | }
75 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/execution_plan_metrics.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "encoding/json"
5 |
6 | "github.com/go-viper/mapstructure/v2"
7 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
8 | "github.com/newrelic/infra-integrations-sdk/v3/log"
9 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
10 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
11 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
13 | )
14 |
15 | func PopulateExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters, connectionInfo performancedbconnection.Info) {
16 | if len(results) == 0 {
17 | log.Debug("No individual queries found.")
18 | return
19 | }
20 | executionDetailsList := getExecutionPlanMetrics(results, connectionInfo)
21 | err := commonutils.IngestMetric(executionDetailsList, "PostgresExecutionPlanMetrics", pgIntegration, cp)
22 | if err != nil {
23 | log.Error("Error ingesting Execution Plan metrics: %v", err)
24 | return
25 | }
26 | }
27 |
28 | func getExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, connectionInfo performancedbconnection.Info) []interface{} {
29 | var executionPlanMetricsList []interface{}
30 | var groupIndividualQueriesByDatabase = groupQueriesByDatabase(results)
31 | for dbName, individualQueriesList := range groupIndividualQueriesByDatabase {
32 | dbConn, err := connectionInfo.NewConnection(dbName)
33 | if err != nil {
34 | log.Error("Error opening database connection: %v", err)
35 | continue
36 | }
37 | processExecutionPlanOfQueries(individualQueriesList, dbConn, &executionPlanMetricsList)
38 | dbConn.Close()
39 | }
40 |
41 | return executionPlanMetricsList
42 | }
43 |
44 | func processExecutionPlanOfQueries(individualQueriesList []datamodels.IndividualQueryMetrics, dbConn *performancedbconnection.PGSQLConnection, executionPlanMetricsList *[]interface{}) {
45 | for _, individualQuery := range individualQueriesList {
46 | if individualQuery.RealQueryText == nil || individualQuery.QueryID == nil || individualQuery.DatabaseName == nil {
47 | log.Error("QueryText, QueryID or Database Name is nil")
48 | continue
49 | }
50 | query := "EXPLAIN (FORMAT JSON) " + *individualQuery.RealQueryText
51 | rows, err := dbConn.Queryx(query)
52 | if err != nil {
53 | log.Debug("Error executing query: %v", err)
54 | continue
55 | }
56 | defer rows.Close()
57 | if !rows.Next() {
58 | log.Debug("Execution plan not found for queryId", *individualQuery.QueryID)
59 | continue
60 | }
61 | var execPlanJSON string
62 | if scanErr := rows.Scan(&execPlanJSON); scanErr != nil {
63 | log.Error("Error scanning row: ", scanErr.Error())
64 | continue
65 | }
66 |
67 | var execPlan []map[string]interface{}
68 | err = json.Unmarshal([]byte(execPlanJSON), &execPlan)
69 | if err != nil {
70 | log.Error("Failed to unmarshal execution plan: %v", err)
71 | continue
72 | }
73 | validateAndFetchNestedExecPlan(execPlan, individualQuery, executionPlanMetricsList)
74 | }
75 | }
76 |
77 | func validateAndFetchNestedExecPlan(execPlan []map[string]interface{}, individualQuery datamodels.IndividualQueryMetrics, executionPlanMetricsList *[]interface{}) {
78 | level := 0
79 | if len(execPlan) > 0 {
80 | if plan, ok := execPlan[0]["Plan"].(map[string]interface{}); ok {
81 | fetchNestedExecutionPlanDetails(individualQuery, &level, plan, executionPlanMetricsList)
82 | } else {
83 | log.Debug("execPlan is not in correct datatype")
84 | }
85 | } else {
86 | log.Debug("execPlan is empty")
87 | }
88 | }
89 |
90 | func groupQueriesByDatabase(results []datamodels.IndividualQueryMetrics) map[string][]datamodels.IndividualQueryMetrics {
91 | databaseMap := make(map[string][]datamodels.IndividualQueryMetrics)
92 | for _, individualQueryMetric := range results {
93 | if individualQueryMetric.DatabaseName == nil {
94 | continue
95 | }
96 | dbName := *individualQueryMetric.DatabaseName
97 | databaseMap[dbName] = append(databaseMap[dbName], individualQueryMetric)
98 | }
99 | return databaseMap
100 | }
101 |
// fetchNestedExecutionPlanDetails decodes one EXPLAIN plan node into a
// QueryExecutionPlanMetrics record, tags it with the owning query's
// identifiers, and recurses into any nested "Plans" children.
//
// NOTE(review): level is a pointer shared across the whole traversal and is
// incremented once per visited node, so "Level" records visit order rather
// than tree depth — confirm this is the intended semantics of the field.
// Assumes individualQuery.QueryID, .DatabaseName and .PlanID are non-nil (the
// callers in this package populate them before calling); a nil pointer here
// would panic.
func fetchNestedExecutionPlanDetails(individualQuery datamodels.IndividualQueryMetrics, level *int, execPlan map[string]interface{}, executionPlanMetricsList *[]interface{}) {
	var execPlanMetrics datamodels.QueryExecutionPlanMetrics
	// Map the EXPLAIN JSON keys onto the struct's tagged fields.
	err := mapstructure.Decode(execPlan, &execPlanMetrics)
	if err != nil {
		log.Error("Failed to decode execPlan to execPlanMetrics: %v", err)
		return
	}
	execPlanMetrics.QueryID = *individualQuery.QueryID
	execPlanMetrics.DatabaseName = *individualQuery.DatabaseName
	execPlanMetrics.Level = *level
	*level++
	execPlanMetrics.PlanID = *individualQuery.PlanID
	*executionPlanMetricsList = append(*executionPlanMetricsList, execPlanMetrics)
	// Recurse into child plans when present; entries that are not JSON objects
	// are silently skipped.
	if nestedPlans, ok := execPlan["Plans"].([]interface{}); ok {
		for _, nestedPlan := range nestedPlans {
			if nestedPlanMap, nestedOk := nestedPlan.(map[string]interface{}); nestedOk {
				fetchNestedExecutionPlanDetails(individualQuery, level, nestedPlanMap, executionPlanMetricsList)
			}
		}
	}
}
123 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/execution_plan_metrics_test.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "testing"
5 |
6 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
7 |
8 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
9 |
10 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
11 | "github.com/newrelic/nri-postgresql/src/args"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
13 | "github.com/stretchr/testify/assert"
14 | )
15 |
16 | func TestPopulateExecutionPlanMetrics(t *testing.T) {
17 | pgIntegration, _ := integration.New("test", "1.0.0")
18 | args := args.ArgumentList{}
19 | results := []datamodels.IndividualQueryMetrics{}
20 | cp := common_parameters.SetCommonParameters(args, uint64(13), "testdb")
21 | connectionInfo := performancedbconnection.DefaultConnectionInfo(&args)
22 | PopulateExecutionPlanMetrics(results, pgIntegration, cp, connectionInfo)
23 | assert.Empty(t, pgIntegration.Entities)
24 | }
25 |
26 | func TestGroupQueriesByDatabase(t *testing.T) {
27 | databaseName := "testdb"
28 | queryID := "queryid1"
29 | queryText := "SELECT 1"
30 | results := []datamodels.IndividualQueryMetrics{
31 | {
32 | QueryID: &queryID,
33 | QueryText: &queryText,
34 | DatabaseName: &databaseName,
35 | },
36 | }
37 |
38 | groupedQueries := groupQueriesByDatabase(results)
39 | assert.Len(t, groupedQueries, 1)
40 | assert.Contains(t, groupedQueries, databaseName)
41 | assert.Len(t, groupedQueries[databaseName], 1)
42 | }
43 |
44 | func TestFetchNestedExecutionPlanDetails(t *testing.T) {
45 | queryID := "queryid1"
46 | queryText := "SELECT 1"
47 | databaseName := "testdb"
48 | planID := "planid1"
49 | individualQuery := datamodels.IndividualQueryMetrics{
50 | QueryID: &queryID,
51 | QueryText: &queryText,
52 | DatabaseName: &databaseName,
53 | PlanID: &planID,
54 | }
55 | execPlan := map[string]interface{}{
56 | "Node Type": "Seq Scan",
57 | "Relation Name": "test_table",
58 | "Alias": "test_table",
59 | "Startup Cost": 0.00,
60 | "Total Cost": 1000.00,
61 | "Plan Rows": 100000,
62 | "Plan Width": 4,
63 | }
64 | execPlanLevel2 := map[string]interface{}{
65 | "Node Type": "Seq Scan",
66 | "Relation Name": "test_table",
67 | "Alias": "test_table",
68 | "Startup Cost": 0.00,
69 | "Total Cost": 1000.00,
70 | "Plan Rows": 100000,
71 | "Plan Width": 4,
72 | "Plans": []interface{}{execPlan},
73 | }
74 | execPlanLevel3 := map[string]interface{}{
75 | "Node Type": "Seq Scan",
76 | "Relation Name": "test_table",
77 | "Alias": "test_table",
78 | "Startup Cost": 0.00,
79 | "Total Cost": 1000.00,
80 | "Plan Rows": 100000,
81 | "Plan Width": 4,
82 | "Plans": []interface{}{execPlanLevel2},
83 | }
84 | var executionPlanMetricsList []interface{}
85 | level := 0
86 |
87 | fetchNestedExecutionPlanDetails(individualQuery, &level, execPlanLevel3, &executionPlanMetricsList)
88 | assert.Len(t, executionPlanMetricsList, 3)
89 | }
90 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/individual_query_metrics.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/jmoiron/sqlx"
7 |
8 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
9 | "github.com/newrelic/infra-integrations-sdk/v3/log"
10 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
11 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
12 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
13 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
14 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations"
15 | )
16 |
// queryInfoMap maps a query ID to its query text (anonymized by the time it is
// stored here — see processForAnonymizeQueryMap).
type queryInfoMap map[string]string

// databaseQueryInfoMap maps a database name to that database's queryInfoMap.
type databaseQueryInfoMap map[string]queryInfoMap
19 |
20 | func PopulateIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters, enabledExtensions map[string]bool) []datamodels.IndividualQueryMetrics {
21 | isEligible, err := validations.CheckIndividualQueryMetricsFetchEligibility(enabledExtensions)
22 | if err != nil {
23 | log.Error("Error executing query: %v", err)
24 | return nil
25 | }
26 | if !isEligible {
27 | log.Debug("Extension 'pg_stat_monitor' is not enabled or unsupported version.")
28 | return nil
29 | }
30 | log.Debug("Extension 'pg_stat_monitor' enabled.")
31 | individualQueryMetricsInterface, individualQueriesList := getIndividualQueryMetrics(conn, slowRunningQueries, cp)
32 | if len(individualQueryMetricsInterface) == 0 {
33 | log.Debug("No individual queries found.")
34 | return nil
35 | }
36 | err = commonutils.IngestMetric(individualQueryMetricsInterface, "PostgresIndividualQueries", pgIntegration, cp)
37 | if err != nil {
38 | log.Error("Error ingesting individual queries: %v", err)
39 | return nil
40 | }
41 | return individualQueriesList
42 | }
43 |
44 | func getIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, cp *commonparameters.CommonParameters) ([]interface{}, []datamodels.IndividualQueryMetrics) {
45 | if len(slowRunningQueries) == 0 {
46 | log.Debug("No slow running queries found.")
47 | return nil, nil
48 | }
49 | var individualQueryMetricsList []datamodels.IndividualQueryMetrics
50 | var individualQueryMetricsListInterface []interface{}
51 | anonymizedQueriesByDB := processForAnonymizeQueryMap(slowRunningQueries)
52 | versionSpecificIndividualQuery, err := commonutils.FetchVersionSpecificIndividualQueries(cp.Version)
53 | if err != nil {
54 | log.Error("Unsupported postgres version: %v", err)
55 | return nil, nil
56 | }
57 | for _, slowRunningMetric := range slowRunningQueries {
58 | if slowRunningMetric.QueryID == nil {
59 | continue
60 | }
61 | query := fmt.Sprintf(versionSpecificIndividualQuery, *slowRunningMetric.QueryID, cp.Databases, cp.QueryMonitoringResponseTimeThreshold, min(cp.QueryMonitoringCountThreshold, commonutils.MaxIndividualQueryCountThreshold))
62 | rows, err := conn.Queryx(query)
63 | if err != nil {
64 | log.Debug("Error executing query in individual query: %v", err)
65 | return nil, nil
66 | }
67 | defer rows.Close()
68 | individualQuerySamplesList := processRows(rows, anonymizedQueriesByDB)
69 | for _, individualQuery := range individualQuerySamplesList {
70 | individualQueryMetricsList = append(individualQueryMetricsList, individualQuery)
71 | individualQueryMetricsListInterface = append(individualQueryMetricsListInterface, individualQuery)
72 | }
73 | }
74 | return individualQueryMetricsListInterface, individualQueryMetricsList
75 | }
76 |
77 | func processRows(rows *sqlx.Rows, anonymizedQueriesByDB databaseQueryInfoMap) []datamodels.IndividualQueryMetrics {
78 | var individualQueryMetricsList []datamodels.IndividualQueryMetrics
79 | for rows.Next() {
80 | var model datamodels.IndividualQueryMetrics
81 | if scanErr := rows.StructScan(&model); scanErr != nil {
82 | log.Error("Could not scan row: ", scanErr)
83 | continue
84 | }
85 | if model.QueryID == nil || model.DatabaseName == nil {
86 | log.Error("QueryID or DatabaseName is nil")
87 | continue
88 | }
89 | individualQueryMetric := model
90 | anonymizedQueryText := anonymizedQueriesByDB[*model.DatabaseName][*model.QueryID]
91 | queryText := *model.QueryText
92 | individualQueryMetric.RealQueryText = &queryText
93 | individualQueryMetric.QueryText = &anonymizedQueryText
94 | generatedPlanID, err := commonutils.GeneratePlanID()
95 | if err != nil {
96 | log.Error("Error generating plan ID: %v", err)
97 | continue
98 | }
99 | individualQueryMetric.PlanID = &generatedPlanID
100 | individualQueryMetricsList = append(individualQueryMetricsList, individualQueryMetric)
101 | }
102 | return individualQueryMetricsList
103 | }
104 |
105 | func processForAnonymizeQueryMap(slowRunningMetricList []datamodels.SlowRunningQueryMetrics) databaseQueryInfoMap {
106 | anonymizeQueryMapByDB := make(databaseQueryInfoMap)
107 | for _, metric := range slowRunningMetricList {
108 | if metric.DatabaseName == nil || metric.QueryID == nil || metric.QueryText == nil {
109 | continue
110 | }
111 | dbName := *metric.DatabaseName
112 | queryID := *metric.QueryID
113 | anonymizedQuery := *metric.QueryText
114 |
115 | if _, exists := anonymizeQueryMapByDB[dbName]; !exists {
116 | anonymizeQueryMapByDB[dbName] = make(map[string]string)
117 | }
118 | anonymizeQueryMapByDB[dbName][queryID] = anonymizedQuery
119 | }
120 | return anonymizeQueryMapByDB
121 | }
122 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/individual_query_metrics_test.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 | "regexp"
6 | "testing"
7 |
8 | "github.com/newrelic/nri-postgresql/src/args"
9 | "github.com/newrelic/nri-postgresql/src/connection"
10 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
11 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
13 | "github.com/stretchr/testify/assert"
14 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
15 | )
16 |
17 | func TestGetIndividualQueryMetrics(t *testing.T) {
18 | conn, mock := connection.CreateMockSQL(t)
19 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
20 | databaseName := "testdb"
21 | version := uint64(13)
22 | mockQueryID := "-123"
23 | mockQueryText := "SELECT 1"
24 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
25 |
26 | // Mock the individual query
27 | query := fmt.Sprintf(queries.IndividualQuerySearchV13AndAbove, mockQueryID, databaseName, args.QueryMonitoringResponseTimeThreshold, args.QueryMonitoringCountThreshold)
28 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{
29 | "newrelic", "query", "queryid", "datname", "planid", "cpu_time_ms", "exec_time_ms",
30 | }).AddRow(
31 | "newrelic_value", "SELECT 1", "queryid1", "testdb", "planid1", 10.0, 20.0,
32 | ))
33 |
34 | slowRunningQueries := []datamodels.SlowRunningQueryMetrics{
35 | {
36 | QueryID: &mockQueryID,
37 | QueryText: &mockQueryText,
38 | DatabaseName: &databaseName,
39 | },
40 | }
41 |
42 | individualQueryMetricsInterface, individualQueryMetrics := getIndividualQueryMetrics(conn, slowRunningQueries, cp)
43 |
44 | assert.Len(t, individualQueryMetricsInterface, 1)
45 | assert.Len(t, individualQueryMetrics, 1)
46 | assert.NoError(t, mock.ExpectationsWereMet())
47 | }
48 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/slow_query_metrics.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
7 | "github.com/newrelic/infra-integrations-sdk/v3/log"
8 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
9 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
10 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
11 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations"
13 | )
14 |
15 | func getSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, cp *commonparameters.CommonParameters) ([]datamodels.SlowRunningQueryMetrics, []interface{}, error) {
16 | var slowQueryMetricsList []datamodels.SlowRunningQueryMetrics
17 | var slowQueryMetricsListInterface []interface{}
18 | versionSpecificSlowQuery, err := commonutils.FetchVersionSpecificSlowQueries(cp.Version)
19 | if err != nil {
20 | log.Error("Unsupported postgres version: %v", err)
21 | return nil, nil, err
22 | }
23 | var query = fmt.Sprintf(versionSpecificSlowQuery, cp.Databases, cp.QueryMonitoringCountThreshold)
24 | rows, err := conn.Queryx(query)
25 | if err != nil {
26 | return nil, nil, err
27 | }
28 | defer rows.Close()
29 | for rows.Next() {
30 | var slowQuery datamodels.SlowRunningQueryMetrics
31 | if scanErr := rows.StructScan(&slowQuery); scanErr != nil {
32 | return nil, nil, err
33 | }
34 | slowQueryMetricsList = append(slowQueryMetricsList, slowQuery)
35 | slowQueryMetricsListInterface = append(slowQueryMetricsListInterface, slowQuery)
36 | }
37 | return slowQueryMetricsList, slowQueryMetricsListInterface, nil
38 | }
39 |
40 | func PopulateSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters, enabledExtensions map[string]bool) []datamodels.SlowRunningQueryMetrics {
41 | isEligible, err := validations.CheckSlowQueryMetricsFetchEligibility(enabledExtensions)
42 | if err != nil {
43 | log.Error("Error executing query: %v", err)
44 | return nil
45 | }
46 | if !isEligible {
47 | log.Debug("Extension 'pg_stat_statements' is not enabled or unsupported version.")
48 | return nil
49 | }
50 |
51 | slowQueryMetricsList, slowQueryMetricsListInterface, err := getSlowRunningMetrics(conn, cp)
52 | if err != nil {
53 | log.Error("Error fetching slow-running queries: %v", err)
54 | return nil
55 | }
56 |
57 | if len(slowQueryMetricsList) == 0 {
58 | log.Debug("No slow-running queries found.")
59 | return nil
60 | }
61 | err = commonutils.IngestMetric(slowQueryMetricsListInterface, "PostgresSlowQueries", pgIntegration, cp)
62 | if err != nil {
63 | log.Error("Error ingesting slow-running queries: %v", err)
64 | return nil
65 | }
66 | return slowQueryMetricsList
67 | }
68 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/slow_query_metrics_test.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 | "regexp"
6 | "testing"
7 |
8 | "github.com/newrelic/nri-postgresql/src/args"
9 | "github.com/newrelic/nri-postgresql/src/connection"
10 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
11 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
13 | "github.com/stretchr/testify/assert"
14 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
15 | )
16 |
17 | func runSlowQueryTest(t *testing.T, query string, version uint64, expectedLength int) {
18 | conn, mock := connection.CreateMockSQL(t)
19 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
20 | databaseName := "testdb"
21 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
22 |
23 | query = fmt.Sprintf(query, "testdb", args.QueryMonitoringCountThreshold)
24 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{
25 | "newrelic", "query_id", "query_text", "database_name", "schema_name", "execution_count",
26 | "avg_elapsed_time_ms", "avg_disk_reads", "avg_disk_writes", "statement_type", "collection_timestamp",
27 | }).AddRow(
28 | "newrelic_value", "queryid1", "SELECT 1", "testdb", "public", 10,
29 | 15.0, 5, 2, "SELECT", "2023-01-01T00:00:00Z",
30 | ))
31 | slowQueryList, _, err := getSlowRunningMetrics(conn, cp)
32 | assert.NoError(t, err)
33 | assert.Len(t, slowQueryList, expectedLength)
34 | assert.NoError(t, mock.ExpectationsWereMet())
35 | }
36 |
// TestGetSlowRunningMetrics covers row parsing for the v13+ slow-query SQL.
func TestGetSlowRunningMetrics(t *testing.T) {
	runSlowQueryTest(t, queries.SlowQueriesForV13AndAbove, 13, 1)
}
40 |
// TestGetSlowRunningMetricsV12 covers row parsing for the v12-specific slow-query SQL.
func TestGetSlowRunningMetricsV12(t *testing.T) {
	runSlowQueryTest(t, queries.SlowQueriesForV12, 12, 1)
}
44 |
45 | func TestGetSlowRunningEmptyMetrics(t *testing.T) {
46 | conn, mock := connection.CreateMockSQL(t)
47 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
48 | databaseName := "testdb"
49 | version := uint64(13)
50 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
51 | expectedQuery := queries.SlowQueriesForV13AndAbove
52 | query := fmt.Sprintf(expectedQuery, "testdb", args.QueryMonitoringCountThreshold)
53 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{
54 | "newrelic", "query_id", "query_text", "database_name", "schema_name", "execution_count",
55 | "avg_elapsed_time_ms", "avg_disk_reads", "avg_disk_writes", "statement_type", "collection_timestamp",
56 | }))
57 | slowQueryList, _, err := getSlowRunningMetrics(conn, cp)
58 |
59 | assert.NoError(t, err)
60 | assert.Len(t, slowQueryList, 0)
61 | assert.NoError(t, mock.ExpectationsWereMet())
62 | }
63 |
64 | func TestGetSlowRunningMetricsUnsupportedVersion(t *testing.T) {
65 | conn, mock := connection.CreateMockSQL(t)
66 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
67 | databaseName := "testdb"
68 | version := uint64(11)
69 | cp := common_parameters.SetCommonParameters(args, version, databaseName)
70 | slowQueryList, _, err := getSlowRunningMetrics(conn, cp)
71 | assert.EqualError(t, err, commonutils.ErrUnsupportedVersion.Error())
72 | assert.Len(t, slowQueryList, 0)
73 | assert.NoError(t, mock.ExpectationsWereMet())
74 | }
75 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/wait_event_metrics.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
7 | "github.com/newrelic/infra-integrations-sdk/v3/log"
8 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
9 | commonparameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
10 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
11 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels"
12 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
13 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations"
14 | )
15 |
16 | func PopulateWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, cp *commonparameters.CommonParameters, enabledExtensions map[string]bool) error {
17 | var isEligible bool
18 | var eligibleCheckErr error
19 | isEligible, eligibleCheckErr = validations.CheckWaitEventMetricsFetchEligibility(enabledExtensions)
20 | if eligibleCheckErr != nil {
21 | log.Error("Error executing query: %v", eligibleCheckErr)
22 | return commonutils.ErrUnExpectedError
23 | }
24 | if !isEligible {
25 | log.Debug("Extension 'pg_wait_sampling' or 'pg_stat_statement' is not enabled or unsupported version.")
26 | return commonutils.ErrNotEligible
27 | }
28 | waitEventMetricsList, waitEventErr := getWaitEventMetrics(conn, cp)
29 | if waitEventErr != nil {
30 | log.Error("Error fetching wait event queries: %v", waitEventErr)
31 | return commonutils.ErrUnExpectedError
32 | }
33 | if len(waitEventMetricsList) == 0 {
34 | log.Debug("No wait event queries found.")
35 | return nil
36 | }
37 | err := commonutils.IngestMetric(waitEventMetricsList, "PostgresWaitEvents", pgIntegration, cp)
38 | if err != nil {
39 | log.Error("Error ingesting wait event queries: %v", err)
40 | return err
41 | }
42 | return nil
43 | }
44 |
45 | func getWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, cp *commonparameters.CommonParameters) ([]interface{}, error) {
46 | var waitEventMetricsList []interface{}
47 | var query = fmt.Sprintf(queries.WaitEvents, cp.Databases, cp.QueryMonitoringCountThreshold)
48 | rows, err := conn.Queryx(query)
49 | if err != nil {
50 | return nil, err
51 | }
52 | defer rows.Close()
53 | for rows.Next() {
54 | var waitEvent datamodels.WaitEventMetrics
55 | if waitScanErr := rows.StructScan(&waitEvent); waitScanErr != nil {
56 | return nil, err
57 | }
58 | waitEventMetricsList = append(waitEventMetricsList, waitEvent)
59 | }
60 | return waitEventMetricsList, nil
61 | }
62 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/performance-metrics/wait_event_metrics_test.go:
--------------------------------------------------------------------------------
1 | package performancemetrics
2 |
3 | import (
4 | "fmt"
5 | "regexp"
6 | "testing"
7 |
8 | "github.com/newrelic/nri-postgresql/src/args"
9 | "github.com/newrelic/nri-postgresql/src/connection"
10 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
11 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries"
12 | "github.com/stretchr/testify/assert"
13 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
14 | )
15 |
16 | func TestGetWaitEventMetrics(t *testing.T) {
17 | conn, mock := connection.CreateMockSQL(t)
18 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
19 | databaseName := "testdb"
20 | cp := common_parameters.SetCommonParameters(args, uint64(14), databaseName)
21 |
22 | var query = fmt.Sprintf(queries.WaitEvents, databaseName, args.QueryMonitoringCountThreshold)
23 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{
24 | "wait_event_name", "wait_category", "total_wait_time_ms", "collection_timestamp", "query_id", "query_text", "database_name",
25 | }).AddRow(
26 | "Locks:Lock", "Locks", 1000.0, "2023-01-01T00:00:00Z", "queryid1", "SELECT 1", "testdb",
27 | ))
28 | waitEventsList, err := getWaitEventMetrics(conn, cp)
29 |
30 | assert.NoError(t, err)
31 | assert.Len(t, waitEventsList, 1)
32 | assert.NoError(t, mock.ExpectationsWereMet())
33 | }
34 |
35 | func TestGetWaitEventEmptyMetrics(t *testing.T) {
36 | conn, mock := connection.CreateMockSQL(t)
37 | args := args.ArgumentList{QueryMonitoringCountThreshold: 10}
38 | databaseName := "testdb"
39 | cp := common_parameters.SetCommonParameters(args, uint64(14), databaseName)
40 |
41 | var query = fmt.Sprintf(queries.WaitEvents, databaseName, args.QueryMonitoringCountThreshold)
42 | mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{
43 | "wait_event_name", "wait_category", "total_wait_time_ms", "collection_timestamp", "query_id", "query_text", "database_name",
44 | }))
45 | waitEventsList, err := getWaitEventMetrics(conn, cp)
46 | assert.NoError(t, err)
47 | assert.Len(t, waitEventsList, 0)
48 | assert.NoError(t, mock.ExpectationsWereMet())
49 | }
50 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/query_performance_main.go:
--------------------------------------------------------------------------------
1 | package queryperformancemonitoring
2 |
3 | // this is the main go file for the query_monitoring package
4 | import (
5 | "time"
6 |
7 | "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations"
8 |
9 | common_parameters "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-parameters"
10 |
11 | "github.com/newrelic/infra-integrations-sdk/v3/integration"
12 | "github.com/newrelic/infra-integrations-sdk/v3/log"
13 | "github.com/newrelic/nri-postgresql/src/args"
14 | "github.com/newrelic/nri-postgresql/src/collection"
15 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
16 | "github.com/newrelic/nri-postgresql/src/metrics"
17 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
18 | performancemetrics "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/performance-metrics"
19 | )
20 |
21 | func QueryPerformanceMain(args args.ArgumentList, pgIntegration *integration.Integration, databaseMap collection.DatabaseList) {
22 | connectionInfo := performancedbconnection.DefaultConnectionInfo(&args)
23 | if len(databaseMap) == 0 {
24 | log.Debug("No databases found")
25 | return
26 | }
27 | newConnection, err := connectionInfo.NewConnection(connectionInfo.DatabaseName())
28 | if err != nil {
29 | log.Error("Error creating connection: ", err)
30 | return
31 | }
32 | defer newConnection.Close()
33 |
34 | version, versionErr := metrics.CollectVersion(newConnection)
35 | if versionErr != nil {
36 | log.Error("Error fetching version: ", versionErr)
37 | return
38 | }
39 | versionInt := version.Major
40 | if !validations.CheckPostgresVersionSupportForQueryMonitoring(versionInt) {
41 | log.Debug("Postgres version: %d is not supported for query monitoring", versionInt)
42 | return
43 | }
44 | cp := common_parameters.SetCommonParameters(args, versionInt, commonutils.GetDatabaseListInString(databaseMap))
45 |
46 | populateQueryPerformanceMetrics(newConnection, pgIntegration, cp, connectionInfo)
47 | }
48 |
// populateQueryPerformanceMetrics runs every performance-metric collection step
// in sequence, logging each step's wall-clock duration. The order is
// load-bearing: slow-running queries feed PopulateIndividualQueryMetrics,
// whose output in turn feeds PopulateExecutionPlanMetrics.
func populateQueryPerformanceMetrics(newConnection *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, cp *common_parameters.CommonParameters, connectionInfo performancedbconnection.Info) {
	// Every step's eligibility check depends on which extensions are installed.
	enabledExtensions, err := validations.FetchAllExtensions(newConnection)
	if err != nil {
		log.Error("Error fetching extensions: ", err)
		return
	}
	start := time.Now()
	log.Debug("Starting PopulateSlowRunningMetrics at ", start)
	// Kept: slow-running queries seed the individual-query step below.
	slowRunningQueries := performancemetrics.PopulateSlowRunningMetrics(newConnection, pgIntegration, cp, enabledExtensions)
	log.Debug("PopulateSlowRunningMetrics completed in ", time.Since(start))

	start = time.Now()
	log.Debug("Starting PopulateWaitEventMetrics at ", start)
	// Error deliberately discarded: failures are logged inside the call and
	// must not block the remaining steps.
	_ = performancemetrics.PopulateWaitEventMetrics(newConnection, pgIntegration, cp, enabledExtensions)
	log.Debug("PopulateWaitEventMetrics completed in ", time.Since(start))

	start = time.Now()
	log.Debug("Starting PopulateBlockingMetrics at ", start)
	performancemetrics.PopulateBlockingMetrics(newConnection, pgIntegration, cp, enabledExtensions)
	log.Debug("PopulateBlockingMetrics completed in ", time.Since(start))

	start = time.Now()
	log.Debug("Starting PopulateIndividualQueryMetrics at ", start)
	individualQueries := performancemetrics.PopulateIndividualQueryMetrics(newConnection, slowRunningQueries, pgIntegration, cp, enabledExtensions)
	log.Debug("PopulateIndividualQueryMetrics completed in ", time.Since(start))

	start = time.Now()
	log.Debug("Starting PopulateExecutionPlanMetrics at ", start)
	performancemetrics.PopulateExecutionPlanMetrics(individualQueries, pgIntegration, cp, connectionInfo)
	log.Debug("PopulateExecutionPlanMetrics completed in ", time.Since(start))
}
80 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/validations/performance_metrics_validations.go:
--------------------------------------------------------------------------------
1 | package validations
2 |
3 | import (
4 | "github.com/newrelic/infra-integrations-sdk/v3/log"
5 | performancedbconnection "github.com/newrelic/nri-postgresql/src/connection"
6 | commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils"
7 | )
8 |
9 | func FetchAllExtensions(conn *performancedbconnection.PGSQLConnection) (map[string]bool, error) {
10 | rows, err := conn.Queryx("SELECT extname FROM pg_extension")
11 | if err != nil {
12 | log.Error("Error executing query: ", err.Error())
13 | return nil, err
14 | }
15 | defer rows.Close()
16 | var enabledExtensions = make(map[string]bool)
17 | for rows.Next() {
18 | var extname string
19 | if err := rows.Scan(&extname); err != nil {
20 | log.Error("Error scanning rows: ", err.Error())
21 | return nil, err
22 | }
23 | enabledExtensions[extname] = true
24 | }
25 | return enabledExtensions, nil
26 | }
27 |
// CheckSlowQueryMetricsFetchEligibility reports whether slow-query metrics can
// be collected: the pg_stat_statements extension must be enabled.
func CheckSlowQueryMetricsFetchEligibility(enabledExtensions map[string]bool) (bool, error) {
	statStatementsEnabled := enabledExtensions["pg_stat_statements"]
	return statStatementsEnabled, nil
}
31 |
// CheckWaitEventMetricsFetchEligibility reports whether wait-event metrics can
// be collected: BOTH pg_wait_sampling and pg_stat_statements must be enabled.
func CheckWaitEventMetricsFetchEligibility(enabledExtensions map[string]bool) (bool, error) {
	hasWaitSampling := enabledExtensions["pg_wait_sampling"]
	hasStatStatements := enabledExtensions["pg_stat_statements"]
	return hasWaitSampling && hasStatStatements, nil
}
35 |
36 | func CheckBlockingSessionMetricsFetchEligibility(enabledExtensions map[string]bool, version uint64) (bool, error) {
37 | // Version 12 and 13 do not require the pg_stat_statements extension
38 | if version == commonutils.PostgresVersion12 || version == commonutils.PostgresVersion13 {
39 | return true, nil
40 | }
41 | return enabledExtensions["pg_stat_statements"], nil
42 | }
43 |
// CheckIndividualQueryMetricsFetchEligibility reports whether individual-query
// metrics can be collected: the pg_stat_monitor extension must be enabled.
func CheckIndividualQueryMetricsFetchEligibility(enabledExtensions map[string]bool) (bool, error) {
	statMonitorEnabled := enabledExtensions["pg_stat_monitor"]
	return statMonitorEnabled, nil
}
47 |
// CheckPostgresVersionSupportForQueryMonitoring reports whether the given major
// Postgres version is supported by query performance monitoring (v12 and above).
func CheckPostgresVersionSupportForQueryMonitoring(version uint64) bool {
	return version >= commonutils.PostgresVersion12
}
51 |
--------------------------------------------------------------------------------
/src/query-performance-monitoring/validations/performance_metrics_validations_test.go:
--------------------------------------------------------------------------------
1 | package validations
2 |
3 | import (
4 | "regexp"
5 | "testing"
6 |
7 | "github.com/newrelic/nri-postgresql/src/connection"
8 | "github.com/stretchr/testify/assert"
9 | "gopkg.in/DATA-DOG/go-sqlmock.v1"
10 | )
11 |
12 | func TestCheckBlockingSessionMetricsFetchEligibilityExtensionNotRequired(t *testing.T) {
13 | conn, mock := connection.CreateMockSQL(t)
14 | version := uint64(12)
15 | enabledExtensions, _ := FetchAllExtensions(conn)
16 | isExtensionEnabledTest, _ := CheckBlockingSessionMetricsFetchEligibility(enabledExtensions, version)
17 | assert.Equal(t, isExtensionEnabledTest, true)
18 | assert.NoError(t, mock.ExpectationsWereMet())
19 | }
20 |
// TestCheckBlockingSessionMetricsFetchEligibilitySupportedVersionSuccess verifies
// that on v14+ blocking-session metrics are eligible when pg_stat_statements is
// installed.
func TestCheckBlockingSessionMetricsFetchEligibilitySupportedVersionSuccess(t *testing.T) {
	conn, mock := connection.CreateMockSQL(t)
	version := uint64(14)
	validationQueryStatStatements := "SELECT extname FROM pg_extension"
	mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}).AddRow("pg_stat_statements"))
	enabledExtensions, _ := FetchAllExtensions(conn)
	isExtensionEnabledTest, _ := CheckBlockingSessionMetricsFetchEligibility(enabledExtensions, version)
	assert.Equal(t, isExtensionEnabledTest, true)
	assert.NoError(t, mock.ExpectationsWereMet())
}
31 |
32 | func TestCheckBlockingSessionMetricsFetchEligibilitySupportedVersionFail(t *testing.T) {
33 | conn, mock := connection.CreateMockSQL(t)
34 | version := uint64(14)
35 | validationQueryStatStatements := "SELECT extname FROM pg_extension"
36 | mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}).AddRow("pg_stat_statements"))
37 | enabledExtensions, _ := FetchAllExtensions(conn)
38 | isExtensionEnabledTest, _ := CheckBlockingSessionMetricsFetchEligibility(enabledExtensions, version)
39 | assert.Equal(t, isExtensionEnabledTest, true)
40 | assert.NoError(t, mock.ExpectationsWereMet())
41 | }
42 |
// TestIndividualQueryMetricsFetchEligibilitySupportedVersionSuccess verifies that
// individual-query metrics are eligible when pg_stat_monitor is installed.
func TestIndividualQueryMetricsFetchEligibilitySupportedVersionSuccess(t *testing.T) {
	conn, mock := connection.CreateMockSQL(t)
	validationQueryStatStatements := "SELECT extname FROM pg_extension"
	mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}).AddRow("pg_stat_monitor"))
	enabledExtensions, _ := FetchAllExtensions(conn)
	isExtensionEnabledTest, _ := CheckIndividualQueryMetricsFetchEligibility(enabledExtensions)
	assert.Equal(t, isExtensionEnabledTest, true)
	assert.NoError(t, mock.ExpectationsWereMet())
}
52 |
// TestIndividualQueryMetricsFetchEligibilitySupportedVersionFail verifies that
// individual-query metrics are NOT eligible when no extensions are installed.
func TestIndividualQueryMetricsFetchEligibilitySupportedVersionFail(t *testing.T) {
	conn, mock := connection.CreateMockSQL(t)
	validationQueryStatStatements := "SELECT extname FROM pg_extension"
	mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}))
	enabledExtensions, _ := FetchAllExtensions(conn)
	isExtensionEnabledTest, _ := CheckIndividualQueryMetricsFetchEligibility(enabledExtensions)
	assert.Equal(t, isExtensionEnabledTest, false)
	assert.NoError(t, mock.ExpectationsWereMet())
}
62 |
// TestCheckWaitEventMetricsFetchEligibility verifies that wait-event metrics are
// eligible only when BOTH pg_wait_sampling and pg_stat_statements are enabled.
func TestCheckWaitEventMetricsFetchEligibility(t *testing.T) {
	validationQuery := "SELECT extname FROM pg_extension"
	testCases := []struct {
		waitExt string
		statExt string
		expected bool
	}{
		{"pg_wait_sampling", "pg_stat_statements", true}, // Success
		{"pg_wait_sampling", "", false}, // Fail V1
		{"", "pg_stat_statements", false}, // Fail V2
	}

	conn, mock := connection.CreateMockSQL(t)
	for _, tc := range testCases {
		// NOTE(review): failing cases insert an empty-string extname row; the
		// map lookup ignores it, but the blank rows could be omitted entirely.
		mock.ExpectQuery(regexp.QuoteMeta(validationQuery)).WillReturnRows(sqlmock.NewRows([]string{"extname"}).AddRow(tc.waitExt).AddRow(tc.statExt))
		enabledExtensions, _ := FetchAllExtensions(conn)
		isExtensionEnabledTest, _ := CheckWaitEventMetricsFetchEligibility(enabledExtensions)
		assert.Equal(t, isExtensionEnabledTest, tc.expected)
		assert.NoError(t, mock.ExpectationsWereMet())
	}
}
84 |
// TestCheckSlowQueryMetricsFetchEligibilitySupportedVersionSuccess verifies that
// slow-query metrics are eligible when pg_stat_statements is installed.
func TestCheckSlowQueryMetricsFetchEligibilitySupportedVersionSuccess(t *testing.T) {
	conn, mock := connection.CreateMockSQL(t)
	validationQueryStatStatements := "SELECT extname FROM pg_extension"
	mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}).AddRow("pg_stat_statements"))
	enabledExtensions, _ := FetchAllExtensions(conn)
	isExtensionEnabledTest, _ := CheckSlowQueryMetricsFetchEligibility(enabledExtensions)
	assert.Equal(t, isExtensionEnabledTest, true)
	assert.NoError(t, mock.ExpectationsWereMet())
}
94 |
// TestCheckSlowQueryMetricsFetchEligibilitySupportedVersionFail verifies that
// slow-query metrics are NOT eligible when no extensions are installed.
func TestCheckSlowQueryMetricsFetchEligibilitySupportedVersionFail(t *testing.T) {
	conn, mock := connection.CreateMockSQL(t)
	validationQueryStatStatements := "SELECT extname FROM pg_extension"
	mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"extname"}))
	enabledExtensions, _ := FetchAllExtensions(conn)
	isExtensionEnabledTest, _ := CheckSlowQueryMetricsFetchEligibility(enabledExtensions)
	assert.Equal(t, isExtensionEnabledTest, false)
	assert.NoError(t, mock.ExpectationsWereMet())
}
104 |
--------------------------------------------------------------------------------
/tests/README.md:
--------------------------------------------------------------------------------
1 | # Integration tests
2 |
3 | Steps to update the integration tests for the latest supported version:
4 |
5 | 1. Update the postgres image in the `postgres-latest-supported` of the [docker compose](./docker-compose.yml).
6 | 2. Execute the integration tests
7 | * If the JSON-schema validation fails:
8 | - Check the inventory, some server settings might have been removed.
9 | - Check the number of entities: the number of internal tables and or indexes may vary (metrics failures).
10 | - Check the release notes ([Postgres 16 example](https://www.postgresql.org/docs/release/16.0/))
3. Once the failures are understood (if any), update the corresponding JSON-schema files; you may need to regenerate them
   using the integration output, especially if there is any metric failure.
13 |
14 | # Testing pgbouncer upgrades
15 |
16 | Steps to test breaking metrics changes that happen when new pgbouncer versions are released:
17 |
18 | 1. Update the `db` image and `pgbouncer` images in [docker compose](./docker-compose-pgbouncer.yml).
19 | 2. Use the command `docker compose -f ./docker-compose-pgbouncer.yml up` to get the environment running
20 | 3. Run the integration with `go run ./src/main.go -pgbouncer -username {USERNAME} -password {PASSWORD} -p 5432 -pretty > pgbouncer_output.json`
21 | * If the terminal logs errors:
22 | - Check which query is failing
23 | - Explore pgbouncer release notes for changes to the `STATS` and `POOLS` tables
24 | - Add or remove metrics to make the query succeed
25 | - Modify tests to check for the new metrics in the latest versions
26 | * No errors:
27 | - Take a look at `pgbouncer_output.json` and look at the pgbouncer entities and check if metrics are reported correctly.
28 | - If metrics are incorrectly reported, go back and look at where queries might be failing.
--------------------------------------------------------------------------------
/tests/docker-compose-performance.yml:
--------------------------------------------------------------------------------
services:

  # Oldest supported Postgres (13) with the performance extensions pre-installed.
  postgres13:
    build:
      context: ./perf-testing/oldest_supported/
      dockerfile: Dockerfile
    container_name: "postgresql-perf-oldest"
    restart: always
    environment:
      - POSTGRES_USER=dbuser
      - POSTGRES_PASSWORD=dbpassword
      - POSTGRES_DB=demo
    volumes:
      - postgres13:/var/lib/postgresql/data
    ports:
      - "6432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Latest supported Postgres with the performance extensions pre-installed.
  postgresql-latest:
    build:
      # FIX: previously built from ./perf-testing/oldest_supported/, so this
      # "latest" service actually ran the oldest Postgres image.
      context: ./perf-testing/latest_supported/
      dockerfile: Dockerfile
    restart: always
    container_name: "postgresql-perf-latest"
    environment:
      - POSTGRES_USER=dbuser
      - POSTGRES_PASSWORD=dbpassword
      - POSTGRES_DB=demo
    volumes:
      - pgdata_latest:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Plain Postgres with no extensions, used to exercise the ineligibility paths.
  postgres-without-extensions:
    image: postgres:17.0
    restart: always
    container_name: "postgresql-noext"
    environment:
      - POSTGRES_USER=dbuser
      - POSTGRES_PASSWORD=dbpassword
      - POSTGRES_DB=demo
    volumes:
      - pgdata_noext:/var/lib/postgresql/data
    ports:
      - "7432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5

  # The integration binary image the tests exec into.
  nri-postgresql:
    container_name: nri_postgresql
    build:
      context: ../
      dockerfile: tests/perf-testing/integration/Dockerfile

volumes:
  pgdata_latest:
  postgres13:
  pgdata_noext:
--------------------------------------------------------------------------------
/tests/docker-compose-pgbouncer.yml:
--------------------------------------------------------------------------------
# Test environment for pgbouncer metrics: a Postgres backend plus a pgbouncer
# instance in front of it. See tests/README.md for the upgrade-testing workflow.
services:
  db:
    container_name: db
    image: postgres:16-alpine
    volumes:
      - pg_data:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=hbZkzny5xrvVH
    healthcheck:
      test: ['CMD', 'pg_isready', '-U', 'postgres']

  pgbouncer:
    container_name: pgbouncer
    image: edoburu/pgbouncer:latest
    environment:
      - DB_USER=postgres
      - DB_PASSWORD=hbZkzny5xrvVH
      - DB_HOST=db
      # - DB_NAME=test
      - AUTH_TYPE=scram-sha-256
      - POOL_MODE=transaction
      - ADMIN_USERS=postgres,dbuser
    ports:
      # pgbouncer (not Postgres) is what the integration connects to on 5432.
      - "5432:5432"
    depends_on:
      - db
    healthcheck:
      test: ['CMD', 'pg_isready', '-h', 'localhost']

volumes:
  pg_data:
--------------------------------------------------------------------------------
/tests/docker-compose.yml:
--------------------------------------------------------------------------------
# Integration-test environment: the oldest (9.6) and latest supported Postgres
# versions plus the integration container. See tests/README.md for how to
# update the latest-supported image.
services:
  postgres-9-6:
    image: postgres:9.6
    restart: always
    container_name: postgres-9-6
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: example
      POSTGRES_DB: demo

  postgres-latest-supported:
    image: postgres:17.0
    restart: always
    container_name: postgres-latest-supported
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: example
      POSTGRES_DB: demo

  nri-postgresql:
    container_name: nri-postgresql
    build:
      context: ../
      dockerfile: tests/perf-testing/integration/Dockerfile
--------------------------------------------------------------------------------
/tests/perf-testing/integration/Dockerfile:
--------------------------------------------------------------------------------
# Build stage: compile the integration binaries from source.
# FIX: "AS" upper-cased to match the FROM keyword (Docker FromAsCasing lint).
FROM golang:1.23.5-bookworm AS builder
ARG CGO_ENABLED=0
WORKDIR /go/src/github.com/newrelic/nri-postgresql
COPY . .
RUN make clean compile

# Runtime stage: only the compiled binaries, kept alive for exec-based tests.
FROM alpine:latest
COPY --from=builder /go/src/github.com/newrelic/nri-postgresql/bin /
CMD ["sleep", "1h"]
--------------------------------------------------------------------------------
/tests/perf-testing/latest_supported/01-init-extensions.sql:
--------------------------------------------------------------------------------
-- Enable the extensions required by query performance monitoring; the matching
-- shared_preload_libraries entries are set in this directory's Dockerfile.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;

CREATE EXTENSION IF NOT EXISTS pg_wait_sampling;

CREATE EXTENSION IF NOT EXISTS pg_stat_monitor;
--------------------------------------------------------------------------------
/tests/perf-testing/latest_supported/02-create-database.sql:
--------------------------------------------------------------------------------
-- Sample database populated by 03-import-data.sql.
CREATE DATABASE titanic;
--------------------------------------------------------------------------------
/tests/perf-testing/latest_supported/03-import-data.sql:
--------------------------------------------------------------------------------
-- Connect to titanic database
\c titanic;

-- Import the titanic.sql file that was downloaded during Docker build
\i /docker-entrypoint-initdb.d/titanic.sql;

-- Let the integration user (POSTGRES_USER=dbuser) read and write the imported data
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO dbuser;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO dbuser;

-- Analyze tables for better query planning
ANALYZE VERBOSE;
--------------------------------------------------------------------------------
/tests/perf-testing/latest_supported/Dockerfile:
--------------------------------------------------------------------------------
# Latest supported Postgres (17) with pg_stat_statements preloaded and the
# pg_wait_sampling / pg_stat_monitor extensions built from source, seeded with
# the titanic sample database for the performance integration tests.
FROM postgres:17.0

# Dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    git \
    wget \
    postgresql-server-dev-17 \
    && rm -rf /var/lib/apt/lists/*

# Postgres Docker Images copy contents of postgresql.conf.sample to postgresql.conf during initialization
# COPY custom.conf /usr/share/postgresql/postgresql.conf.sample -- DO NOT USE
RUN echo "shared_preload_libraries = 'pg_stat_statements,pg_wait_sampling,pg_stat_monitor'" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_statements.track = all" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_statements.save = on" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_monitor.pgsm_enable_query_plan = on" >> /usr/share/postgresql/postgresql.conf.sample

# Install pg_wait_sampling
# NOTE(review): builds the unpinned default branch; pinning a release tag would
# make the image reproducible.
RUN git clone https://github.com/postgrespro/pg_wait_sampling.git \
    && cd pg_wait_sampling \
    && make USE_PGXS=1 \
    && make USE_PGXS=1 install \
    && cd .. \
    && rm -rf pg_wait_sampling

# Install pg_stat_monitor
RUN git clone https://github.com/percona/pg_stat_monitor.git \
    && cd pg_stat_monitor \
    && make USE_PGXS=1 \
    && make USE_PGXS=1 install \
    && cd .. \
    && rm -rf pg_stat_monitor

# Download the titanic database
RUN wget https://raw.githubusercontent.com/neondatabase/postgres-sample-dbs/main/titanic.sql -P /docker-entrypoint-initdb.d/

# Enable the extensions and setup the titanic database
COPY 01-init-extensions.sql /docker-entrypoint-initdb.d/01-init-extensions.sql
COPY 02-create-database.sql /docker-entrypoint-initdb.d/02-create-database.sql
COPY 03-import-data.sql /docker-entrypoint-initdb.d/03-import-data.sql
--------------------------------------------------------------------------------
/tests/perf-testing/oldest_supported/01-init-extensions.sql:
--------------------------------------------------------------------------------
-- Enable the extensions required by query performance monitoring; the matching
-- shared_preload_libraries entries are set in this directory's Dockerfile.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;

CREATE EXTENSION IF NOT EXISTS pg_wait_sampling;

CREATE EXTENSION IF NOT EXISTS pg_stat_monitor;
--------------------------------------------------------------------------------
/tests/perf-testing/oldest_supported/02-create-database.sql:
--------------------------------------------------------------------------------
-- Sample database populated by 03-import-data.sql.
CREATE DATABASE titanic;
--------------------------------------------------------------------------------
/tests/perf-testing/oldest_supported/03-import-data.sql:
--------------------------------------------------------------------------------
-- Connect to titanic database
\c titanic;

-- Import the titanic.sql file that was downloaded during Docker build
\i /docker-entrypoint-initdb.d/titanic.sql;

-- Let the integration user (POSTGRES_USER=dbuser) read and write the imported data
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO dbuser;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO dbuser;

-- Analyze tables for better query planning
ANALYZE VERBOSE;
--------------------------------------------------------------------------------
/tests/perf-testing/oldest_supported/Dockerfile:
--------------------------------------------------------------------------------
# Oldest supported Postgres (13) with pg_stat_statements preloaded and the
# pg_wait_sampling / pg_stat_monitor extensions built from source, seeded with
# the titanic sample database for the performance integration tests.
FROM postgres:13

# Dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    git \
    wget \
    postgresql-server-dev-13 \
    && rm -rf /var/lib/apt/lists/*

# Postgres Docker Images copy contents of postgresql.conf.sample to postgresql.conf during initialization
# COPY custom.conf /usr/share/postgresql/postgresql.conf.sample -- DO NOT USE
RUN echo "shared_preload_libraries = 'pg_stat_statements,pg_wait_sampling,pg_stat_monitor'" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_statements.track = all" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_statements.save = on" >> /usr/share/postgresql/postgresql.conf.sample
RUN echo "pg_stat_monitor.pgsm_enable_query_plan = on" >> /usr/share/postgresql/postgresql.conf.sample

# Install pg_wait_sampling
# NOTE(review): builds the unpinned default branch; pinning a release tag would
# make the image reproducible.
RUN git clone https://github.com/postgrespro/pg_wait_sampling.git \
    && cd pg_wait_sampling \
    && make USE_PGXS=1 \
    && make USE_PGXS=1 install \
    && cd .. \
    && rm -rf pg_wait_sampling

# Install pg_stat_monitor
RUN git clone https://github.com/percona/pg_stat_monitor.git \
    && cd pg_stat_monitor \
    && make USE_PGXS=1 \
    && make USE_PGXS=1 install \
    && cd .. \
    && rm -rf pg_stat_monitor

# Download the titanic database
RUN wget https://raw.githubusercontent.com/neondatabase/postgres-sample-dbs/main/titanic.sql -P /docker-entrypoint-initdb.d/

# Enable the extensions and setup the titanic database
COPY 01-init-extensions.sql /docker-entrypoint-initdb.d/01-init-extensions.sql
COPY 02-create-database.sql /docker-entrypoint-initdb.d/02-create-database.sql
COPY 03-import-data.sql /docker-entrypoint-initdb.d/03-import-data.sql
--------------------------------------------------------------------------------
/tests/postgresql_test.go:
--------------------------------------------------------------------------------
1 | //go:build integration
2 |
3 | package tests
4 |
5 | import (
6 | "flag"
7 | "os"
8 | "testing"
9 |
10 | "github.com/newrelic/nri-postgresql/tests/simulation"
11 | "github.com/stretchr/testify/assert"
12 | )
13 |
// Command-line overrides for the integration run; defaults match the
// docker-compose setup used by these tests.
var (
	defaultPassword = flag.String("password", "example", "Default password for postgres")
	defaultUser     = flag.String("username", "postgres", "Default username for postgres")
	defaultDB       = flag.String("database", "demo", "Default database name")
	container       = flag.String("container", "nri-postgresql", "Container name for the integration")
)

const (
	// docker compose service names
	serviceNamePostgres96     = "postgres-9-6"
	serviceNamePostgresLatest = "postgres-latest-supported"
	// Path of the integration binary inside the integration container.
	defaultBinaryPath = "/nri-postgresql"
	// Name of the container the integration binary is executed in.
	integrationContainer = "nri-postgresql"
)
28 |
// TestMain parses the test flags before delegating to the test runner.
func TestMain(m *testing.M) {
	flag.Parse()
	os.Exit(m.Run())
}
34 |
35 | func TestSuccessConnection(t *testing.T) {
36 | t.Parallel()
37 | testCases := []struct {
38 | Name string
39 | Hostname string
40 | Schema string
41 | ExtraFlags []string
42 | }{
43 | {
44 | Name: "Testing Metrics and inventory for Postgres v9.6.x",
45 | Hostname: serviceNamePostgres96,
46 | Schema: "jsonschema-latest.json",
47 | },
48 | {
49 | Name: "Testing Metrics and inventory for latest Postgres supported version",
50 | Hostname: serviceNamePostgresLatest,
51 | Schema: "jsonschema-latest.json",
52 | },
53 | {
54 | Name: "Inventory only for latest Postgres supported version",
55 | Hostname: serviceNamePostgresLatest,
56 | Schema: "jsonschema-inventory-latest.json",
57 | ExtraFlags: []string{`-inventory=true`},
58 | },
59 | }
60 |
61 | for _, tc := range testCases {
62 | tc := tc
63 | t.Run(tc.Name, func(t *testing.T) {
64 | t.Parallel()
65 | args := append([]string{`-collection_list=all`}, tc.ExtraFlags...)
66 | stdout, stderr, err := simulation.RunIntegration(tc.Hostname, integrationContainer, defaultBinaryPath, defaultUser, defaultPassword, defaultDB, args...)
67 | assert.Empty(t, stderr)
68 | assert.NoError(t, err)
69 | assert.NotEmpty(t, stdout)
70 | err = simulation.ValidateJSONSchema(tc.Schema, stdout)
71 | assert.NoError(t, err)
72 | })
73 | }
74 | }
75 |
76 | func TestMissingRequiredVars(t *testing.T) {
77 | // Temporarily set username and password to nil to test missing credentials
78 | origUser, origPsw := defaultUser, defaultPassword
79 | defaultUser, defaultPassword = nil, nil
80 | defer func() {
81 | defaultUser, defaultPassword = origUser, origPsw
82 | }()
83 |
84 | _, stderr, err := simulation.RunIntegration(serviceNamePostgresLatest, integrationContainer, defaultBinaryPath, defaultUser, defaultPassword, defaultDB)
85 | assert.Error(t, err)
86 | assert.Contains(t, stderr, "invalid configuration: must specify a username and password")
87 | }
88 |
89 | func TestIgnoringDB(t *testing.T) {
90 | args := []string{
91 | `-collection_list=all`,
92 | `-collection_ignore_database_list=["demo"]`,
93 | }
94 | stdout, stderr, err := simulation.RunIntegration(serviceNamePostgresLatest, integrationContainer, defaultBinaryPath, defaultUser, defaultPassword, defaultDB, args...)
95 | assert.NoError(t, err)
96 | assert.Empty(t, stderr)
97 | assert.Contains(t, stdout, `"database:postgres"`)
98 | assert.NotContains(t, stdout, `"database:demo"`)
99 | }
100 |
--------------------------------------------------------------------------------
/tests/simulation/helpers.go:
--------------------------------------------------------------------------------
1 | //nolint:all
2 | package simulation
3 |
4 | import (
5 | "bytes"
6 | "fmt"
7 | "os"
8 | "os/exec"
9 | "path/filepath"
10 | "strings"
11 |
12 | "github.com/newrelic/infra-integrations-sdk/v3/log"
13 | "github.com/xeipuuv/gojsonschema"
14 | )
15 |
16 | // ExecInContainer executes a command in a specified container
17 | func ExecInContainer(container string, command []string, envVars ...string) (string, string, error) {
18 | cmdLine := make([]string, 0, 3+len(command))
19 | cmdLine = append(cmdLine, "exec", "-i")
20 |
21 | for _, envVar := range envVars {
22 | cmdLine = append(cmdLine, "-e", envVar)
23 | }
24 |
25 | cmdLine = append(cmdLine, container)
26 | cmdLine = append(cmdLine, command...)
27 |
28 | log.Debug("executing: docker %s", strings.Join(cmdLine, " "))
29 |
30 | cmd := exec.Command("docker", cmdLine...)
31 |
32 | var outbuf, errbuf bytes.Buffer
33 | cmd.Stdout = &outbuf
34 | cmd.Stderr = &errbuf
35 |
36 | err := cmd.Run()
37 | stdout := outbuf.String()
38 | stderr := errbuf.String()
39 |
40 | if err != nil {
41 | return stdout, stderr, err
42 | }
43 |
44 | return stdout, stderr, nil
45 | }
46 |
47 | // RunIntegration executes the integration binary with the provided arguments
48 | func RunIntegration(targetContainer, integrationContainer, binaryPath string, username, password *string, database *string, args ...string) (string, string, error) {
49 | command := []string{binaryPath}
50 |
51 | if username != nil {
52 | command = append(command, "-username", *username)
53 | }
54 | if password != nil {
55 | command = append(command, "-password", *password)
56 | }
57 |
58 | // Always use port 5432 for integration runs
59 | command = append(command, "-port", "5432")
60 |
61 | if database != nil {
62 | command = append(command, "-database", *database)
63 | }
64 | if targetContainer != "" {
65 | command = append(command, "-hostname", targetContainer)
66 | }
67 |
68 | for _, arg := range args {
69 | command = append(command, arg)
70 | }
71 |
72 | stdout, stderr, err := ExecInContainer(integrationContainer, command)
73 | if stderr != "" {
74 | log.Debug("Integration command Standard Error: ", stderr)
75 | }
76 |
77 | return stdout, stderr, err
78 | }
79 |
80 | // ValidateJSONSchema validates a JSON string against a schema file
81 | func ValidateJSONSchema(fileName string, input string) error {
82 | pwd, err := os.Getwd()
83 | if err != nil {
84 | log.Error(err.Error())
85 | return err
86 | }
87 |
88 | schemaURI := fmt.Sprintf("file://%s", filepath.Join(pwd, "testdata", fileName))
89 | log.Info("loading schema from %s", schemaURI)
90 |
91 | schemaLoader := gojsonschema.NewReferenceLoader(schemaURI)
92 | documentLoader := gojsonschema.NewStringLoader(input)
93 |
94 | result, err := gojsonschema.Validate(schemaLoader, documentLoader)
95 | if err != nil {
96 | return fmt.Errorf("error loading JSON schema: %v", err)
97 | }
98 |
99 | if result.Valid() {
100 | return nil
101 | }
102 |
103 | fmt.Printf("Errors for JSON schema: '%s'\n", schemaURI)
104 | for _, desc := range result.Errors() {
105 | fmt.Printf("\t- %s\n", desc)
106 | }
107 | fmt.Println()
108 |
109 | return fmt.Errorf("the output of the integration doesn't have expected JSON format")
110 | }
111 |
// GetSchemaFileName returns the JSON schema filename used to validate the
// given sample type; unknown sample types yield the empty string.
func GetSchemaFileName(sampleType string) string {
	switch sampleType {
	case "PostgresqlInstanceSample":
		return "jsonschema-latest.json"
	case "PostgresSlowQueries":
		return "slow-queries-schema.json"
	case "PostgresWaitEvents":
		return "wait-events-schema.json"
	case "PostgresBlockingSessions":
		return "blocking-sessions-schema.json"
	case "PostgresIndividualQueries":
		return "individual-queries-schema.json"
	case "PostgresExecutionPlanMetrics":
		return "execution-plan-schema.json"
	default:
		return ""
	}
}
124 |
--------------------------------------------------------------------------------
/tests/testdata/blocking-sessions-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "type": "object",
4 | "required": ["name", "protocol_version", "integration_version", "data"],
5 | "properties": {
6 | "name": {
7 | "type": "string",
8 | "const": "com.newrelic.postgresql"
9 | },
10 | "protocol_version": {
11 | "type": "string"
12 | },
13 | "integration_version": {
14 | "type": "string"
15 | },
16 | "data": {
17 | "type": "array",
18 | "items": {
19 | "type": "object",
20 | "required": ["entity", "metrics", "inventory", "events"],
21 | "properties": {
22 | "entity": {
23 | "type": "object",
24 | "required": ["name", "type", "id_attributes"],
25 | "properties": {
26 | "name": {
27 | "type": "string"
28 | },
29 | "type": {
30 | "type": "string",
31 | "const": "pg-instance"
32 | },
33 | "id_attributes": {
34 | "type": "array"
35 | }
36 | }
37 | },
38 | "metrics": {
39 | "type": "array",
40 | "items": {
41 | "type": "object",
42 | "required": [
43 | "blocked_pid",
44 | "blocked_query",
45 | "blocked_query_start",
46 | "blocking_pid",
47 | "blocking_query",
48 | "blocking_query_start",
49 | "database_name",
50 | "event_type"
51 | ],
52 | "properties": {
53 | "blocked_pid": {
54 | "type": "integer",
55 | "minimum": 0
56 | },
57 | "blocked_query": {
58 | "type": "string"
59 | },
60 | "blocked_query_start": {
61 | "type": "string",
62 | "format": "date-time"
63 | },
64 | "blocking_pid": {
65 | "type": "integer",
66 | "minimum": 0
67 | },
68 | "blocking_query": {
69 | "type": "string"
70 | },
71 | "blocking_query_start": {
72 | "type": "string",
73 | "format": "date-time"
74 | },
75 | "database_name": {
76 | "type": "string"
77 | },
78 | "event_type": {
79 | "type": "string",
80 | "const": "PostgresBlockingSessions"
81 | }
82 | },
83 | "additionalProperties": false
84 | }
85 | },
86 | "inventory": {
87 | "type": "object"
88 | },
89 | "events": {
90 | "type": "array"
91 | }
92 | },
93 | "additionalProperties": false
94 | }
95 | }
96 | },
97 | "additionalProperties": false
98 | }
--------------------------------------------------------------------------------
/tests/testdata/individual-queries-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "type": "object",
4 | "required": [
5 | "name",
6 | "protocol_version",
7 | "integration_version",
8 | "data"
9 | ],
10 | "properties": {
11 | "name": {
12 | "type": "string",
13 | "const": "com.newrelic.postgresql"
14 | },
15 | "protocol_version": {
16 | "type": "string"
17 | },
18 | "integration_version": {
19 | "type": "string"
20 | },
21 | "data": {
22 | "type": "array",
23 | "items": {
24 | "type": "object",
25 | "required": [
26 | "entity",
27 | "metrics",
28 | "inventory",
29 | "events"
30 | ],
31 | "properties": {
32 | "entity": {
33 | "type": "object",
34 | "required": [
35 | "name",
36 | "type",
37 | "id_attributes"
38 | ],
39 | "properties": {
40 | "name": {
41 | "type": "string"
42 | },
43 | "type": {
44 | "type": "string",
45 | "const": "pg-instance"
46 | },
47 | "id_attributes": {
48 | "type": "array"
49 | }
50 | }
51 | },
52 | "metrics": {
53 | "type": "array",
54 | "items": {
55 | "type": "object",
56 | "required": [
57 | "event_type",
58 | "query_id",
59 | "query_text",
60 | "database_name",
61 | "plan_id",
62 | "exec_time_ms"
63 | ],
64 | "properties": {
65 | "cpu_time_ms": {
66 | "type": "number",
67 | "minimum": 0
68 | },
69 | "exec_time_ms": {
70 | "type": "number",
71 | "minimum": 0
72 | },
73 | "database_name": {
74 | "type": "string"
75 | },
76 | "event_type": {
77 | "type": "string",
78 | "const": "PostgresIndividualQueries"
79 | },
80 | "plan_id": {
81 | "type": "string"
82 | },
83 | "query_id": {
84 | "type": "string"
85 | },
86 | "query_text": {
87 | "type": "string"
88 | }
89 | },
90 | "additionalProperties": false
91 | }
92 | },
93 | "inventory": {
94 | "type": "object"
95 | },
96 | "events": {
97 | "type": "array"
98 | }
99 | },
100 | "additionalProperties": false
101 | }
102 | }
103 | },
104 | "additionalProperties": false
105 | }
--------------------------------------------------------------------------------
/tests/testdata/slow-queries-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "type": "object",
4 | "required": [
5 | "name",
6 | "protocol_version",
7 | "integration_version",
8 | "data"
9 | ],
10 | "properties": {
11 | "name": {
12 | "type": "string",
13 | "const": "com.newrelic.postgresql"
14 | },
15 | "protocol_version": {
16 | "type": "string"
17 | },
18 | "integration_version": {
19 | "type": "string"
20 | },
21 | "data": {
22 | "type": "array",
23 | "items": {
24 | "type": "object",
25 | "required": [
26 | "entity",
27 | "metrics",
28 | "inventory",
29 | "events"
30 | ],
31 | "properties": {
32 | "entity": {
33 | "type": "object",
34 | "required": [
35 | "name",
36 | "type",
37 | "id_attributes"
38 | ],
39 | "properties": {
40 | "name": {
41 | "type": "string"
42 | },
43 | "type": {
44 | "type": "string",
45 | "const": "pg-instance"
46 | },
47 | "id_attributes": {
48 | "type": "array"
49 | }
50 | }
51 | },
52 | "metrics": {
53 | "type": "array",
54 | "items": {
55 | "type": "object",
56 | "required": [
57 | "event_type",
58 | "query_id",
59 | "query_text",
60 | "database_name",
61 | "avg_elapsed_time_ms",
62 | "execution_count",
63 | "collection_timestamp"
64 | ],
65 | "properties": {
66 | "avg_disk_reads": {
67 | "type": "integer",
68 | "minimum": 0
69 | },
70 | "avg_disk_writes": {
71 | "type": "integer",
72 | "minimum": 0
73 | },
74 | "avg_elapsed_time_ms": {
75 | "type": "number",
76 | "minimum": 0
77 | },
78 | "collection_timestamp": {
79 | "type": "string",
80 | "format": "date-time"
81 | },
82 | "database_name": {
83 | "type": "string"
84 | },
85 | "event_type": {
86 | "type": "string",
87 | "const": "PostgresSlowQueries"
88 | },
89 | "execution_count": {
90 | "type": "integer",
91 | "minimum": 0
92 | },
93 | "query_id": {
94 | "type": "string"
95 | },
96 | "query_text": {
97 | "type": "string"
98 | },
99 | "schema_name": {
100 | "type": "string"
101 | },
102 | "statement_type": {
103 | "type": "string"
104 | }
105 | },
106 | "additionalProperties": false
107 | }
108 | },
109 | "inventory": {
110 | "type": "object"
111 | },
112 | "events": {
113 | "type": "array"
114 | }
115 | },
116 | "additionalProperties": false
117 | }
118 | }
119 | },
120 | "additionalProperties": false
121 | }
--------------------------------------------------------------------------------
/tests/testdata/wait-events-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "type": "object",
4 | "required": ["name", "protocol_version", "integration_version", "data"],
5 | "properties": {
6 | "name": {
7 | "type": "string",
8 | "const": "com.newrelic.postgresql"
9 | },
10 | "protocol_version": {
11 | "type": "string"
12 | },
13 | "integration_version": {
14 | "type": "string"
15 | },
16 | "data": {
17 | "type": "array",
18 | "items": {
19 | "type": "object",
20 | "required": ["entity", "metrics", "inventory", "events"],
21 | "properties": {
22 | "entity": {
23 | "type": "object",
24 | "required": ["name", "type", "id_attributes"],
25 | "properties": {
26 | "name": {
27 | "type": "string"
28 | },
29 | "type": {
30 | "type": "string",
31 | "const": "pg-instance"
32 | },
33 | "id_attributes": {
34 | "type": "array"
35 | }
36 | }
37 | },
38 | "metrics": {
39 | "type": "array",
40 | "items": {
41 | "type": "object",
42 | "required": [
43 | "collection_timestamp",
44 | "database_name",
45 | "event_type",
46 | "query_id",
47 | "query_text",
48 | "wait_category",
49 | "wait_event_name"
50 | ],
51 | "properties": {
52 | "collection_timestamp": {
53 | "type": "string",
54 | "format": "date-time"
55 | },
56 | "database_name": {
57 | "type": "string"
58 | },
59 | "event_type": {
60 | "type": "string",
61 | "const": "PostgresWaitEvents"
62 | },
63 | "query_id": {
64 | "type": "string"
65 | },
66 | "query_text": {
67 | "type": "string"
68 | },
69 | "total_wait_time_ms": {
70 | "type": "number",
71 | "minimum": 0
72 | },
73 | "wait_category": {
74 | "type": "string"
75 | },
76 | "wait_event_name": {
77 | "type": "string"
78 | }
79 | },
80 | "additionalProperties": false
81 | }
82 | },
83 | "inventory": {
84 | "type": "object"
85 | },
86 | "events": {
87 | "type": "array"
88 | }
89 | },
90 | "additionalProperties": false
91 | }
92 | }
93 | },
94 | "additionalProperties": false
95 | }
--------------------------------------------------------------------------------