diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index 1537943979..0000000000
--- a/.eslintignore
+++ /dev/null
@@ -1,2 +0,0 @@
-**/node_modules/**
-docs/
diff --git a/.eslintrc.js b/.eslintrc.js
deleted file mode 100644
index 684333e675..0000000000
--- a/.eslintrc.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2020 New Relic Corporation. All rights reserved.
- * SPDX-License-Identifier: Apache-2.0
- */
-
-'use strict'
-
-module.exports = {
- extends: ['@newrelic', 'plugin:jsdoc/recommended'],
- plugins: ['jsdoc'],
- rules: {
- 'consistent-return': 'off',
- 'jsdoc/require-jsdoc': 'off',
- 'jsdoc/tag-lines': 'off',
- 'jsdoc/check-types': 'off',
- 'jsdoc/no-undefined-types': [
- 'warn',
- {
- definedTypes: [
- 'Logger',
- 'Agent',
- 'Shim',
- 'MessageShim',
- 'TraceSegment',
- 'Transaction',
- 'Tracer',
- 'Exception',
- 'MetricAggregator',
- 'EventEmitter'
- ]
- }
- ]
- },
- parserOptions: {
- ecmaVersion: 2022
- },
- ignorePatterns: [
- 'test/versioned-external',
- 'test/versioned/nextjs/app',
- 'test/versioned/nextjs/app-dir'
- ],
- overrides: [
- {
- files: ['**/*.mjs'],
- parserOptions: {
- sourceType: 'module'
- },
- rules: {
- // TODO: remove this when we decide on how to address
- // here: https://issues.newrelic.com/browse/NEWRELIC-3321
- 'node/no-unsupported-features/es-syntax': 'off'
- }
- },
- {
- files: ['newrelic.js'],
- rules: {
- 'header/header': ['off']
- }
- },
- {
- files: ['./lib/shim/*.js', 'lib/transaction/handle.js', 'api.js'],
- rules: {
- 'jsdoc/require-jsdoc': 'warn'
- }
- }
- ]
-}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b35b75e596..818ab074dc 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -53,7 +53,7 @@ In general, we try to limit adding third-party production dependencies. If one i
### Coding Style Guidelines/Conventions
-We use eslint to enforce certain coding standards. Please see our [.eslintrc](./.eslintrc.js) file for specific rule configuration.
+We use eslint to enforce certain coding standards. Please see our [eslint.config.js](./eslint.config.js) file for specific rule configuration.
### Commit Guidelines
diff --git a/THIRD_PARTY_NOTICES.md b/THIRD_PARTY_NOTICES.md
index 55e28560de..bb91eb5417 100644
--- a/THIRD_PARTY_NOTICES.md
+++ b/THIRD_PARTY_NOTICES.md
@@ -35,7 +35,6 @@ code, the source code can be found at [https://github.com/newrelic/node-newrelic
* [@aws-sdk/s3-request-presigner](#aws-sdks3-request-presigner)
* [@koa/router](#koarouter)
* [@matteo.collina/tspl](#matteocollinatspl)
-* [@newrelic/eslint-config](#newreliceslint-config)
* [@newrelic/newrelic-oss-cli](#newrelicnewrelic-oss-cli)
* [@newrelic/test-utilities](#newrelictest-utilities)
* [@octokit/rest](#octokitrest)
@@ -52,7 +51,6 @@ code, the source code can be found at [https://github.com/newrelic/node-newrelic
* [conventional-changelog-conventionalcommits](#conventional-changelog-conventionalcommits)
* [conventional-changelog-writer](#conventional-changelog-writer)
* [conventional-commits-parser](#conventional-commits-parser)
-* [eslint-plugin-disable](#eslint-plugin-disable)
* [eslint-plugin-jsdoc](#eslint-plugin-jsdoc)
* [eslint-plugin-sonarjs](#eslint-plugin-sonarjs)
* [eslint](#eslint)
@@ -67,6 +65,7 @@ code, the source code can be found at [https://github.com/newrelic/node-newrelic
* [koa](#koa)
* [lint-staged](#lint-staged)
* [lockfile-lint](#lockfile-lint)
+* [neostandard](#neostandard)
* [nock](#nock)
* [proxyquire](#proxyquire)
* [rimraf](#rimraf)
@@ -90,7 +89,7 @@ code, the source code can be found at [https://github.com/newrelic/node-newrelic
### @grpc/grpc-js
-This product includes source derived from [@grpc/grpc-js](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js) ([v1.12.2](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js/tree/v1.12.2)), distributed under the [Apache-2.0 License](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js/blob/v1.12.2/LICENSE):
+This product includes source derived from [@grpc/grpc-js](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js) ([v1.12.4](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js/tree/v1.12.4)), distributed under the [Apache-2.0 License](https://github.com/grpc/grpc-node/tree/master/packages/grpc-js/blob/v1.12.4/LICENSE):
```
Apache License
@@ -614,7 +613,7 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
### https-proxy-agent
-This product includes source derived from [https-proxy-agent](https://github.com/TooTallNate/proxy-agents) ([v7.0.5](https://github.com/TooTallNate/proxy-agents/tree/v7.0.5)), distributed under the [MIT License](https://github.com/TooTallNate/proxy-agents/blob/v7.0.5/LICENSE):
+This product includes source derived from [https-proxy-agent](https://github.com/TooTallNate/proxy-agents) ([v7.0.6](https://github.com/TooTallNate/proxy-agents/tree/v7.0.6)), distributed under the [MIT License](https://github.com/TooTallNate/proxy-agents/blob/v7.0.6/LICENSE):
```
(The MIT License)
@@ -643,7 +642,7 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
### import-in-the-middle
-This product includes source derived from [import-in-the-middle](https://github.com/nodejs/import-in-the-middle) ([v1.11.2](https://github.com/nodejs/import-in-the-middle/tree/v1.11.2)), distributed under the [Apache-2.0 License](https://github.com/nodejs/import-in-the-middle/blob/v1.11.2/LICENSE):
+This product includes source derived from [import-in-the-middle](https://github.com/nodejs/import-in-the-middle) ([v1.12.0](https://github.com/nodejs/import-in-the-middle/tree/v1.12.0)), distributed under the [Apache-2.0 License](https://github.com/nodejs/import-in-the-middle/blob/v1.12.0/LICENSE):
```
Apache License
@@ -1040,7 +1039,7 @@ IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
### winston-transport
-This product includes source derived from [winston-transport](https://github.com/winstonjs/winston-transport) ([v4.8.0](https://github.com/winstonjs/winston-transport/tree/v4.8.0)), distributed under the [MIT License](https://github.com/winstonjs/winston-transport/blob/v4.8.0/LICENSE):
+This product includes source derived from [winston-transport](https://github.com/winstonjs/winston-transport) ([v4.9.0](https://github.com/winstonjs/winston-transport/tree/v4.9.0)), distributed under the [MIT License](https://github.com/winstonjs/winston-transport/blob/v4.9.0/LICENSE):
```
The MIT License (MIT)
@@ -1073,7 +1072,7 @@ SOFTWARE.
### @aws-sdk/client-s3
-This product includes source derived from [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3) ([v3.668.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.668.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.668.0/LICENSE):
+This product includes source derived from [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3) ([v3.714.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.714.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.714.0/LICENSE):
```
Apache License
@@ -1282,7 +1281,7 @@ This product includes source derived from [@aws-sdk/client-s3](https://github.co
### @aws-sdk/s3-request-presigner
-This product includes source derived from [@aws-sdk/s3-request-presigner](https://github.com/aws/aws-sdk-js-v3) ([v3.668.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.668.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.668.0/LICENSE):
+This product includes source derived from [@aws-sdk/s3-request-presigner](https://github.com/aws/aws-sdk-js-v3) ([v3.714.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.714.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.714.0/LICENSE):
```
Apache License
@@ -1547,215 +1546,6 @@ SOFTWARE.
```
-### @newrelic/eslint-config
-
-This product includes source derived from [@newrelic/eslint-config](https://github.com/newrelic/eslint-config-newrelic) ([v0.3.0](https://github.com/newrelic/eslint-config-newrelic/tree/v0.3.0)), distributed under the [Apache-2.0 License](https://github.com/newrelic/eslint-config-newrelic/blob/v0.3.0/LICENSE):
-
-```
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-```
-
### @newrelic/newrelic-oss-cli
This product includes source derived from [@newrelic/newrelic-oss-cli](https://github.com/newrelic/newrelic-oss-cli) ([v0.1.2](https://github.com/newrelic/newrelic-oss-cli/tree/v0.1.2)), distributed under the [Apache-2.0 License](https://github.com/newrelic/newrelic-oss-cli/blob/v0.1.2/LICENSE):
@@ -2709,7 +2499,7 @@ THE SOFTWARE.
### aws-sdk
-This product includes source derived from [aws-sdk](https://github.com/aws/aws-sdk-js) ([v2.1691.0](https://github.com/aws/aws-sdk-js/tree/v2.1691.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js/blob/v2.1691.0/LICENSE.txt):
+This product includes source derived from [aws-sdk](https://github.com/aws/aws-sdk-js) ([v2.1692.0](https://github.com/aws/aws-sdk-js/tree/v2.1692.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js/blob/v2.1692.0/LICENSE.txt):
```
@@ -3108,36 +2898,9 @@ SOFTWARE.
```
-### eslint-plugin-disable
-
-This product includes source derived from [eslint-plugin-disable](https://github.com/mradionov/eslint-plugin-disable) ([v2.0.3](https://github.com/mradionov/eslint-plugin-disable/tree/v2.0.3)), distributed under the [MIT License](https://github.com/mradionov/eslint-plugin-disable/blob/v2.0.3/LICENSE):
-
-```
-Copyright (c) 2015 Michael Radionov (https://github.com/mradionov)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-```
-
### eslint-plugin-jsdoc
-This product includes source derived from [eslint-plugin-jsdoc](https://github.com/gajus/eslint-plugin-jsdoc) ([v48.11.0](https://github.com/gajus/eslint-plugin-jsdoc/tree/v48.11.0)), distributed under the [BSD-3-Clause License](https://github.com/gajus/eslint-plugin-jsdoc/blob/v48.11.0/LICENSE):
+This product includes source derived from [eslint-plugin-jsdoc](https://github.com/gajus/eslint-plugin-jsdoc) ([v50.6.1](https://github.com/gajus/eslint-plugin-jsdoc/tree/v50.6.1)), distributed under the [BSD-3-Clause License](https://github.com/gajus/eslint-plugin-jsdoc/blob/v50.6.1/LICENSE):
```
Copyright (c) 2018, Gajus Kuizinas (http://gajus.com/)
@@ -3169,180 +2932,199 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
### eslint-plugin-sonarjs
-This product includes source derived from [eslint-plugin-sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs) ([v0.18.0](https://github.com/SonarSource/eslint-plugin-sonarjs/tree/v0.18.0)), distributed under the [LGPL-3.0 License](https://github.com/SonarSource/eslint-plugin-sonarjs/blob/v0.18.0/LICENSE):
+This product includes source derived from [eslint-plugin-sonarjs](https://github.com/SonarSource/SonarJS) ([v3.0.1](https://github.com/SonarSource/SonarJS/tree/v3.0.1)), distributed under the [LGPL-3.0-only License](https://github.com/SonarSource/SonarJS/blob/v3.0.1/LICENSE):
```
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
- This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
- 0. Additional Definitions.
-
- As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
- "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
- An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
- A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
- The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
- The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
- 1. Exception to Section 3 of the GNU GPL.
-
- You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
- 2. Conveying Modified Versions.
-
- If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
- a) under this License, provided that you make a good faith effort to
- ensure that, in the event an Application does not supply the
- function or data, the facility still operates, and performs
- whatever part of its purpose remains meaningful, or
-
- b) under the GNU GPL, with none of the additional permissions of
- this License applicable to that copy.
-
- 3. Object Code Incorporating Material from Library Header Files.
-
- The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
- a) Give prominent notice with each copy of the object code that the
- Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the object code with a copy of the GNU GPL and this license
- document.
-
- 4. Combined Works.
-
- You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
- a) Give prominent notice with each copy of the Combined Work that
- the Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the Combined Work with a copy of the GNU GPL and this license
- document.
-
- c) For a Combined Work that displays copyright notices during
- execution, include the copyright notice for the Library among
- these notices, as well as a reference directing the user to the
- copies of the GNU GPL and this license document.
-
- d) Do one of the following:
-
- 0) Convey the Minimal Corresponding Source under the terms of this
- License, and the Corresponding Application Code in a form
- suitable for, and under terms that permit, the user to
- recombine or relink the Application with a modified version of
- the Linked Version to produce a modified Combined Work, in the
- manner specified by section 6 of the GNU GPL for conveying
- Corresponding Source.
-
- 1) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (a) uses at run time
- a copy of the Library already present on the user's computer
- system, and (b) will operate properly with a modified version
- of the Library that is interface-compatible with the Linked
- Version.
-
- e) Provide Installation Information, but only if you would otherwise
- be required to provide such information under section 6 of the
- GNU GPL, and only to the extent that such information is
- necessary to install and execute a modified version of the
- Combined Work produced by recombining or relinking the
- Application with a modified version of the Linked Version. (If
- you use option 4d0, the Installation Information must accompany
- the Minimal Corresponding Source and Corresponding Application
- Code. If you use option 4d1, you must provide the Installation
- Information in the manner specified by section 6 of the GNU GPL
- for conveying Corresponding Source.)
-
- 5. Combined Libraries.
-
- You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
- a) Accompany the combined library with a copy of the same work based
- on the Library, uncombined with any other library facilities,
- conveyed under the terms of this License.
-
- b) Give prominent notice with the combined library that part of it
- is a work based on the Library, and explaining where to find the
- accompanying uncombined form of the same work.
-
- 6. Revised Versions of the GNU Lesser General Public License.
-
- The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
- If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
+SONAR Source-Available License v1.0
+Last Updated November 13, 2024
+
+1. DEFINITIONS
+
+"Agreement" means this Sonar Source-Available License v1.0
+
+"Competing" means marketing a product or service as a substitute for the
+functionality or value of SonarQube. A product or service may compete regardless
+of how it is designed or deployed. For example, a product or service may compete
+even if it provides its functionality via any kind of interface (including
+services, libraries, or plug-ins), even if it is ported to a different platform
+or programming language, and even if it is provided free of charge.
+
+"Contribution" means:
+
+ a) in the case of the initial Contributor, the initial content Distributed under
+this Agreement, and
+
+ b) in the case of each subsequent Contributor:
+ i) changes to the Program, and
+ ii) additions to the Program;
+
+where such changes and/or additions to the Program originate from and are
+Distributed by that particular Contributor. A Contribution "originates" from a
+Contributor if it was added to the Program by such Contributor itself or anyone
+acting on such Contributor's behalf. Contributions do not include changes or
+additions to the Program that are not Modified Works.
+
+"Contributor" means any person or entity that Distributes the Program.
+
+"Derivative Works" shall mean any work, whether in Source Code or other form,
+that is based on (or derived from) the Program and for which the editorial
+revisions, annotations, elaborations, or other modifications represent, as a
+whole, an original work of authorship.
+
+"Distribute" means the acts of a) distributing or b) making available in any
+manner that enables the transfer of a copy.
+
+"Licensed Patents" mean patent claims licensable by a Contributor that are
+necessarily infringed by the use or sale of its Contribution alone or when
+combined with the Program.
+
+"Modified Works" shall mean any work in Source Code or other form that results
+from an addition to, deletion from, or modification of the contents of the
+Program, including, for purposes of clarity, any new file in Source Code form
+that contains any contents of the Program. Modified Works shall not include
+works that contain only declarations, interfaces, types, classes, structures, or
+files of the Program solely in each case in order to link to, bind by name, or
+subclass the Program or Modified Works thereof.
+
+"Non-competitive Purpose" means any purpose except for (a) providing to others
+any product or service that includes or offers the same or substantially similar
+functionality as SonarQube, (b) Competing with SonarQube, and/or (c) employing,
+using, or engaging artificial intelligence technology that is not part of the
+Program to ingest, interpret, analyze, train on, or interact with the data
+provided by the Program, or to engage with the Program in any manner.
+
+"Notices" means any legal statements or attributions included with the Program,
+including, without limitation, statements concerning copyright, patent,
+trademark, disclaimers of warranty, or limitations of liability
+
+"Program" means the Contributions Distributed in accordance with this Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement,
+including Contributors.
+
+"SonarQube" means an open-source or commercial edition of software offered by
+SonarSource that is branded "SonarQube".
+
+"SonarSource" means SonarSource SA, a Swiss company registered in Switzerland
+under UID No. CHE-114.587.664.
+
+"Source Code" means the form of a Program preferred for making modifications,
+including but not limited to software source code, documentation source, and
+configuration files.
+
+2. GRANT OF RIGHTS
+
+ a) Subject to the terms of this Agreement, each Contributor hereby grants
+Recipient a non-exclusive, worldwide, royalty-free copyright license, for any
+Non-competitive Purpose, to reproduce, prepare Derivative Works of, publicly
+display, publicly perform, Distribute and sublicense the Contribution of such
+Contributor, if any, and such Derivative Works.
+
+ b) Subject to the terms of this Agreement, each Contributor hereby grants
+Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed
+Patents, for any Non-competitive Purpose, to make, use, sell, offer to sell,
+import, and otherwise transfer the Contribution of such Contributor, if any, in
+Source Code or other form. This patent license shall apply to the combination of
+the Contribution and the Program if, at the time the Contribution is added by
+the Contributor, such addition of the Contribution causes such combination to be
+covered by the Licensed Patents. The patent license shall not apply to any other
+combinations that include the Contribution.
+
+ c) Recipient understands that although each Contributor grants the licenses to
+its Contributions set forth herein, no assurances are provided by any
+Contributor that the Program does not infringe the patent or other intellectual
+property rights of any other entity. Each Contributor disclaims any liability to
+Recipient for claims brought by any other entity based on infringement of
+intellectual property rights or otherwise. As a condition to exercising the
+rights and licenses granted hereunder, each Recipient hereby assumes sole
+responsibility to secure any other intellectual property rights needed, if any.
+For example, if a third-party patent license is required to allow Recipient to
+Distribute the Program, it is Recipient's responsibility to acquire that license
+before distributing the Program.
+
+ d) Each Contributor represents that to its knowledge it has sufficient copyright
+rights in its Contribution, if any, to grant the copyright license set forth in
+this Agreement.
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then the Program must
+also be made available as Source Code, in accordance with section 3.2, and the
+Contributor must accompany the Program with a statement that the Source Code for
+the Program is available under this Agreement, and inform Recipients how to
+obtain it in a reasonable manner on or through a medium customarily used for
+software exchange; and
+
+3.2 When the Program is Distributed as Source Code:
+
+ a) it must be made available under this Agreement, and
+
+ b) a copy of this Agreement must be included with each copy of the Program.
+
+3.3 Contributors may not remove or alter any Notices contained within the
+Program from any copy of the Program which they Distribute, provided that
+Contributors may add their own appropriate Notices.
+
+4. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY
+APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
+OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT
+LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT,
+MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely
+responsible for determining the appropriateness of using and distributing the
+Program and assumes all risks associated with its exercise of rights under this
+Agreement, including but not limited to the risks and costs of program errors,
+compliance with applicable laws, damage to or loss of data, programs or
+equipment, and unavailability or interruption of operations.
+
+5. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY
+APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF
+THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGES.
+
+6. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under applicable
+law, it shall not affect the validity or enforceability of the remainder of the
+terms of this Agreement, and without further action by the parties hereto, such
+provision shall be reformed to the minimum extent necessary to make such
+provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Program itself
+(excluding combinations of the Program with other software or hardware)
+infringes such Recipient’s patent(s), then such Recipient’s rights granted under
+Section 2(b) shall terminate as of the date such litigation is filed.
+
+All Recipient’s rights under this Agreement shall terminate if it fails to
+comply with any of the material terms or conditions of this Agreement and does
+not cure such failure in a reasonable period of time after becoming aware of
+such noncompliance. If all Recipient’s rights under this Agreement terminate,
+Recipient agrees to cease use and distribution of the Program as soon as
+reasonably practicable. However, Recipient’s obligations under this Agreement
+and any licenses granted by Recipient relating to the Program shall continue and
+survive.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives
+no rights or licenses to the intellectual property of any Contributor under this
+Agreement, whether expressly, by implication, estoppel, or otherwise. All rights
+in the Program not expressly granted under this Agreement are reserved. Nothing
+in this Agreement is intended to be enforceable by any entity that is not a
+Contributor or Recipient. No third-party beneficiary rights are created under
+this Agreement.
```
### eslint
-This product includes source derived from [eslint](https://github.com/eslint/eslint) ([v8.57.1](https://github.com/eslint/eslint/tree/v8.57.1)), distributed under the [MIT License](https://github.com/eslint/eslint/blob/v8.57.1/LICENSE):
+This product includes source derived from [eslint](https://github.com/eslint/eslint) ([v9.17.0](https://github.com/eslint/eslint/tree/v9.17.0)), distributed under the [MIT License](https://github.com/eslint/eslint/blob/v9.17.0/LICENSE):
```
Copyright OpenJS Foundation and other contributors,
@@ -3369,7 +3151,7 @@ THE SOFTWARE.
### express
-This product includes source derived from [express](https://github.com/expressjs/express) ([v4.21.1](https://github.com/expressjs/express/tree/v4.21.1)), distributed under the [MIT License](https://github.com/expressjs/express/blob/v4.21.1/LICENSE):
+This product includes source derived from [express](https://github.com/expressjs/express) ([v4.21.2](https://github.com/expressjs/express/tree/v4.21.2)), distributed under the [MIT License](https://github.com/expressjs/express/blob/v4.21.2/LICENSE):
```
(The MIT License)
@@ -3505,7 +3287,7 @@ SOFTWARE.
### jsdoc
-This product includes source derived from [jsdoc](https://github.com/jsdoc/jsdoc) ([v4.0.3](https://github.com/jsdoc/jsdoc/tree/v4.0.3)), distributed under the [Apache-2.0 License](https://github.com/jsdoc/jsdoc/blob/v4.0.3/LICENSE.md):
+This product includes source derived from [jsdoc](https://github.com/jsdoc/jsdoc) ([v4.0.4](https://github.com/jsdoc/jsdoc/tree/v4.0.4)), distributed under the [Apache-2.0 License](https://github.com/jsdoc/jsdoc/blob/v4.0.4/LICENSE.md):
```
# License
@@ -3915,6 +3697,35 @@ This product includes source derived from [lockfile-lint](https://github.com/lir
limitations under the License.
```
+### neostandard
+
+This product includes source derived from [neostandard](https://github.com/neostandard/neostandard) ([v0.12.0](https://github.com/neostandard/neostandard/tree/v0.12.0)), distributed under the [MIT License](https://github.com/neostandard/neostandard/blob/v0.12.0/LICENSE):
+
+```
+The MIT License (MIT)
+
+Copyright (c) 2024 neostandard contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+```
+
### nock
This product includes source derived from [nock](https://github.com/nock/nock) ([v11.8.0](https://github.com/nock/nock/tree/v11.8.0)), distributed under the [MIT License](https://github.com/nock/nock/blob/v11.8.0/LICENSE):
diff --git a/api.js b/api.js
index cde8f4f663..b0c98607ee 100644
--- a/api.js
+++ b/api.js
@@ -905,7 +905,7 @@ API.prototype.startSegment = function startSegment(name, record, handler, callba
// Create the segment and call the handler.
const wrappedHandler = this.shim.record(handler, function handlerNamer(shim) {
return {
- name: name,
+ name,
recorder: record ? customRecorder : null,
callback: callback ? shim.FIRST : null,
promise: !callback
@@ -1301,6 +1301,7 @@ API.prototype.recordCustomEvent = function recordCustomEvent(eventType, attribut
}
const tx = this.agent.getTransaction()
+ // eslint-disable-next-line sonarjs/pseudo-random
const priority = (tx && tx.priority) || Math.random()
this.agent.customEventAggregator.add([intrinsics, filteredAttributes], priority)
}
@@ -1326,9 +1327,9 @@ API.prototype.instrument = function instrument(moduleName, onRequire, onError) {
let opts = moduleName
if (typeof opts === 'string') {
opts = {
- moduleName: moduleName,
- onRequire: onRequire,
- onError: onError
+ moduleName,
+ onRequire,
+ onError
}
}
@@ -1391,9 +1392,9 @@ API.prototype.instrumentDatastore = function instrumentDatastore(moduleName, onR
let opts = moduleName
if (typeof opts === 'string') {
opts = {
- moduleName: moduleName,
- onRequire: onRequire,
- onError: onError
+ moduleName,
+ onRequire,
+ onError
}
}
@@ -1428,9 +1429,9 @@ API.prototype.instrumentWebframework = function instrumentWebframework(
let opts = moduleName
if (typeof opts === 'string') {
opts = {
- moduleName: moduleName,
- onRequire: onRequire,
- onError: onError
+ moduleName,
+ onRequire,
+ onError
}
}
@@ -1461,9 +1462,9 @@ API.prototype.instrumentMessages = function instrumentMessages(moduleName, onReq
let opts = moduleName
if (typeof opts === 'string') {
opts = {
- moduleName: moduleName,
- onRequire: onRequire,
- onError: onError
+ moduleName,
+ onRequire,
+ onError
}
}
diff --git a/bin/compare-bench-results.js b/bin/compare-bench-results.js
index 613b6a82fb..11cdc500a5 100644
--- a/bin/compare-bench-results.js
+++ b/bin/compare-bench-results.js
@@ -4,7 +4,6 @@
*/
'use strict'
-/* eslint-disable sonarjs/no-duplicate-string, no-console */
const fs = require('fs/promises')
const { errorAndExit } = require('./utils')
@@ -114,7 +113,7 @@ const reportResults = async (resultFiles) => {
const resultPath = 'benchmark_results'
try {
await fs.stat(resultPath)
- } catch (e) {
+ } catch {
await fs.mkdir(resultPath)
}
const fileName = `${resultPath}/comparison_${date.getTime()}.md`
diff --git a/bin/conventional-changelog.js b/bin/conventional-changelog.js
index 2acb534aa5..a61e10842a 100644
--- a/bin/conventional-changelog.js
+++ b/bin/conventional-changelog.js
@@ -137,7 +137,7 @@ class ConventionalChangelog {
* @returns {string} the commit message header with any PR links removed and whitespace trimmed
*/
removePrLinks(subject) {
- return subject.replace(/\(\#\d+\)$/, '').trim()
+ return subject.replace(/\(#\d+\)$/, '').trim()
}
/**
@@ -211,7 +211,7 @@ class ConventionalChangelog {
const markdownFormatter = conventionalChangelogWriter(context, {
...config.writerOpts,
- mainTemplate: mainTemplate,
+ mainTemplate,
headerPartial: headerTemplate,
commitPartial: commitTemplate,
commitGroupsSort: self.rankedGroupSort
diff --git a/bin/create-docs-pr.js b/bin/create-docs-pr.js
index feb76d8f1c..a14bc32794 100644
--- a/bin/create-docs-pr.js
+++ b/bin/create-docs-pr.js
@@ -33,6 +33,7 @@ program.option('--dry-run', 'executes script but does not commit nor create PR')
program.option(
'--repo-path {
console.log(`Executing: '${command}'`)
+ // eslint-disable-next-line sonarjs/os-command
exec(command, (err, stdout) => {
if (err) {
reject(err)
diff --git a/bin/github.js b/bin/github.js
index 095a859aff..761b3ed4ea 100644
--- a/bin/github.js
+++ b/bin/github.js
@@ -45,8 +45,8 @@ class Github {
owner: this.repoOwner,
repo: this.repository,
tag_name: tag,
- name: name,
- body: body
+ name,
+ body
})
return result.data
@@ -146,7 +146,7 @@ class Github {
owner: this.repoOwner,
repo: this.repository,
workflow_id: nameOrId,
- branch: branch,
+ branch,
per_page: 5
})
@@ -159,11 +159,11 @@ class Github {
await octokit.pulls.create({
owner: this.repoOwner,
repo: this.repository,
- head: head,
- base: base,
- title: title,
- body: body,
- draft: draft
+ head,
+ base,
+ title,
+ body,
+ draft
})
}
@@ -181,7 +181,7 @@ class Github {
release_id: id,
body
})
- } catch (err) {
+ } catch {
await new Promise((resolve) => {
const retryWait = 2 ** retryCount * 1000
console.log(
diff --git a/bin/npm-commands.js b/bin/npm-commands.js
index 62ca4f9fc8..b34cdc177a 100644
--- a/bin/npm-commands.js
+++ b/bin/npm-commands.js
@@ -19,6 +19,7 @@ function execAsPromise(command) {
return new Promise((resolve, reject) => {
console.log(`Executing: '${command}'`)
+ // eslint-disable-next-line sonarjs/os-command
exec(command, (err, stdout) => {
if (err) {
return reject(err)
diff --git a/bin/pending-prs.js b/bin/pending-prs.js
index c0f110a6d2..f32d31e195 100644
--- a/bin/pending-prs.js
+++ b/bin/pending-prs.js
@@ -91,9 +91,9 @@ function stopOnError(err) {
function areEnvVarsSet(dryRun) {
if (dryRun) {
- return process.env.hasOwnProperty('GITHUB_TOKEN')
+ return Object.prototype.hasOwnProperty.call(process.env, 'GITHUB_TOKEN')
}
- missingEnvVars = requiredEnvVars.filter((envVar) => !process.env.hasOwnProperty(envVar))
+ missingEnvVars = requiredEnvVars.filter((envVar) => !Object.prototype.hasOwnProperty.call(process.env, envVar))
return missingEnvVars.length === 0
}
diff --git a/bin/run-bench.js b/bin/run-bench.js
index 82127faae5..036c05439e 100755
--- a/bin/run-bench.js
+++ b/bin/run-bench.js
@@ -69,15 +69,13 @@ class Printer {
async finish() {
if (opts.console) {
- /* eslint-disable no-console */
console.log(JSON.stringify(this._tests, null, 2))
- /* eslint-enable no-console */
}
const resultPath = 'benchmark_results'
const filePrefix = opts.filename ? `${opts.filename}` : 'benchmark'
try {
await fs.stat(resultPath)
- } catch (e) {
+ } catch {
await fs.mkdir(resultPath)
}
const content = JSON.stringify(this._tests, null, 2)
@@ -95,7 +93,7 @@ async function run() {
const resolveGlobs = () => {
if (!globs.length) {
- console.error(`There aren't any globs to resolve.`)
+ console.error("There aren't any globs to resolve.")
return
}
const afterGlobbing = (resolved) => {
@@ -127,7 +125,8 @@ async function run() {
args.unshift('--inspect-brk')
}
- const child = cp.spawn('node', args, { cwd: cwd, stdio: 'pipe', silent: true })
+ // eslint-disable-next-line sonarjs/no-os-command-from-path
+ const child = cp.spawn('node', args, { cwd, stdio: 'pipe', silent: true })
child.on('error', (err) => {
console.error(`Error in child test ${test}`, err)
diff --git a/bin/test-naming-rules.js b/bin/test-naming-rules.js
index 4f6222db92..1aee5361bf 100755
--- a/bin/test-naming-rules.js
+++ b/bin/test-naming-rules.js
@@ -1,11 +1,10 @@
-#! /usr/bin/env node
+#!/usr/bin/env node
/*
* Copyright 2020 New Relic Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/
'use strict'
-/* eslint-disable no-console */
const fs = require('fs')
const path = require('path')
@@ -110,7 +109,7 @@ function run(opts) {
function onAppliedRule(rule, newValue, oldValue) {
appliedRules.push({
- rule: rule,
+ rule,
original: oldValue,
normalized: newValue
})
diff --git a/bin/test/create-docs-pr.test.js b/bin/test/create-docs-pr.test.js
index 0f9745266c..2f0f003526 100644
--- a/bin/test/create-docs-pr.test.js
+++ b/bin/test/create-docs-pr.test.js
@@ -70,7 +70,6 @@ test('Create Docs PR script', async (t) => {
const { mockFs, script } = t.nr
mockFs.readFile.yields(null, JSON.stringify({ entries: [{ version: '1.2.3', changes: [] }] }))
- // eslint-disable-next-line sonarjs/no-duplicate-string
const func = () => script.getFrontMatter('v2.0.0', 'changelog.json')
await assert.rejects(func, { message: 'Unable to find 2.0.0 entry in changelog.json' })
})
@@ -151,13 +150,13 @@ test('Create Docs PR script', async (t) => {
const expected = [
'---',
'subject: Node.js agent',
- `releaseDate: '2020-04-03'`,
+ "releaseDate: '2020-04-03'",
'version: 2.0.0',
- `downloadLink: 'https://www.npmjs.com/package/newrelic'`,
- `security: ["upgraded a dep"]`,
- `bugs: ["fixed a bug"]`,
- `features: ["added new api method"]`,
- `---`,
+ "downloadLink: 'https://www.npmjs.com/package/newrelic'",
+ 'security: ["upgraded a dep"]',
+ 'bugs: ["fixed a bug"]',
+ 'features: ["added new api method"]',
+ '---',
'',
'## Notes',
'',
diff --git a/bin/utils.js b/bin/utils.js
index 58ee8e570b..83c8bd20e8 100644
--- a/bin/utils.js
+++ b/bin/utils.js
@@ -1,4 +1,3 @@
-#! /usr/bin/env node
/*
* Copyright 2020 New Relic Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
diff --git a/eslint-plugin-newrelic-header.js b/eslint-plugin-newrelic-header.js
new file mode 100644
index 0000000000..23954eda11
--- /dev/null
+++ b/eslint-plugin-newrelic-header.js
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
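+// Minimal local ESLint plugin exposing a single `header` rule. It verifies
+// that each file begins with the New Relic copyright/SPDX header and can
+// insert the header automatically (after a shebang line when one is present).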
+const headerTmpl = `
+/*
+ * Copyright {{year}} New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+`.trim()
+
+const rule = {
+ meta: {
+ type: 'layout',
+ fixable: 'whitespace',
+ schema: false
+ },
+
+ create(context) {
+ return {
+ Program(node) {
+ const src = context.sourceCode.getText()
+ if (hasHeader(src) === true) {
+ return
+ }
+ context.report({
+ loc: node.loc,
+ message: 'missing or invalid header',
+ fix(fixer) {
+ const rendered = headerTmpl.replace('{{year}}', new Date().getFullYear() + '') + '\n\n'
+ if (hasShebang(src) === true) {
+ return fixer.insertTextAfterRange([0, src.indexOf('\n')], '\n' + rendered)
+ }
+ return fixer.insertTextBefore(
+ node,
+ rendered
+ )
+ }
+ })
+ }
+ }
+ }
+}
+
+module.exports = {
+ meta: {
+ name: 'eslint-plugin-newrelic-header',
+ version: '1.0.0'
+ },
+ rules: {
+ header: rule
+ }
+}
+
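+// Detects a shebang line (e.g. `#!/usr/bin/env node`) at the start of the source.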
+function hasShebang(src) {
+ return /^#!\s?\//.test(src)
+}
+
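+// Checks the first few lines of the file (skipping a shebang, if any) for the
+// expected copyright and SPDX-License-Identifier header block.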
+function hasHeader(src) {
+ const headerLines = src.split('\n').slice(0, 5)
+ if (hasShebang(src) === true) {
+ headerLines.shift()
+ }
+ return headerLines[0] === '/*' &&
+ / \* Copyright \d{4} New Relic Corporation\. All rights reserved\./.test(headerLines[1]) === true &&
+ / \* SPDX-License-Identifier: Apache-2\.0/.test(headerLines[2]) === true &&
+ headerLines[3] === ' */'
+}
diff --git a/eslint.config.js b/eslint.config.js
new file mode 100644
index 0000000000..ce7a604e0f
--- /dev/null
+++ b/eslint.config.js
@@ -0,0 +1,219 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const neostandard = require('neostandard')
+const jsdoc = require('eslint-plugin-jsdoc')
+const sonarjs = require('eslint-plugin-sonarjs')
+const header = require('./eslint-plugin-newrelic-header.js')
+
+// The new eslint configuration format is a simple array of configuration
+// objects. See https://eslint.org/docs/latest/use/configure/configuration-files#configuration-objects.
+//
+// While working on the config, it can be helpful to run:
+// npx @eslint/config-inspector
+
+// This should be used to override rules we don't need applied to our
+// test suites.
+const testFiles = [
+ 'test/benchmark/**',
+ 'test/integration/**',
+ 'test/unit/**',
+ 'test/smoke/**',
+ 'test/versioned/**',
+ 'bin/test/**'
+]
+
+// See https://eslint.org/docs/latest/use/configure/ignore#ignoring-files
+const globalIgnores = {
+ ignores: [
+ '**/node_modules/**',
+ 'docs/',
+ 'out/', // Compiled jsdocs directory.
+ 'test/versioned-external',
+ 'test/versioned/nextjs/app',
+ 'test/versioned/nextjs/app-dir'
+ ]
+}
+
+const localConfig = {
+ plugins: {
+ header
+ },
+
+ rules: {
+ 'consistent-return': 'off',
+ 'no-console': 'warn',
+
+ // Enable file header checking and autocorrection.
+ 'header/header': 'error',
+
+ // This one enforces `!!thing` syntax, which some folks find difficult
+ // to read:
+ 'no-unneeded-ternary': 'off',
+
+ // There are times we'd rather have import statements close to the
+ // thing that needed them:
+ 'import-x/first': 'off',
+
+ // Prefer single quotes, but utilize others to avoid escaping:
+ '@stylistic/quotes': ['error', 'single', { avoidEscape: true }],
+
+ // Neostandard sets this to "warn", but we'd like "error":
+ 'object-shorthand': 'error',
+
+ // These neostandard defaults would generate unnecessary noise:
+ '@stylistic/space-before-function-paren': 'off'
+ },
+
+ linterOptions: {
+ reportUnusedDisableDirectives: 'error'
+ }
+}
+
+const newrelicConfigOverrides = {
+ files: ['**/newrelic.js', '**/newrelic.mjs'],
+ rules: {
+ 'header/header': 'off'
+ }
+}
+
+const sonarjsTestsConfig = {
+ files: testFiles,
+
+ rules: {
+ // We sometimes need to shadow things like Promise for testing:
+ 'sonarjs/no-globals-shadowing': 'off',
+ // Sonar doesn't like our test files that build tests:
+ 'sonarjs/no-empty-test-file': 'off',
+ // Some of our tests hit local HTTP endpoints:
+ 'sonarjs/no-clear-text-protocols': 'off',
+ // We don't always need secure random in tests:
+ 'sonarjs/pseudo-random': 'off',
+ // We need to use `os.exec` and such at times:
+ 'sonarjs/os-command': 'off',
+ 'sonarjs/no-os-command-from-path': 'off',
+ // We have to use bunk passwords in tests:
+ 'sonarjs/no-hardcoded-passwords': 'off', // eslint-disable-line
+ // We will have slow regular expressions in tests and it is okay:
+ 'sonarjs/slow-regex': 'off',
+ // The x-powered-by header has no bearing on the quality of our tests:
+ 'sonarjs/x-powered-by': 'off',
+ // We sometimes need to build new functions via `new Function`:
+ 'sonarjs/code-eval': 'off',
+ 'no-new-func': 'off',
+ // Sometimes we add dummy values that sonar doesn't like:
+ 'sonarjs/no-hardcoded-ip': 'off',
+ // We need some side effect constructors in tests:
+ 'sonarjs/constructor-for-side-effects': 'off',
+ // Tests don't need "safe" permissions:
+ 'sonarjs/file-permissions': 'off',
+ }
+}
+
+const sonarjsOverrides = {
+ rules: {
+ // This rule picks up inlined lambda functions as a violation:
+ 'sonarjs/no-nested-functions': 'off',
+
+ // Don't bug us. We'll get to it (maybe):
+ 'sonarjs/todo-tag': 'warn',
+ 'sonarjs/fixme-tag': 'warn',
+
+ // This rule seems to assume TypeScript; `static readonly FOO` is not valid JavaScript:
+ 'sonarjs/public-static-readonly': 'off',
+
+ // Agree to disagree on their explanation for this one:
+ 'sonarjs/no-parameter-reassignment': 'off'
+ }
+}
+
+const jsdocConfig = {
+ plugins: { jsdoc },
+ rules: {
+ 'jsdoc/require-jsdoc': 'off',
+ 'jsdoc/tag-lines': 'off',
+ 'jsdoc/check-types': 'off',
+ 'jsdoc/no-undefined-types': [
+ 'warn',
+ {
+ definedTypes: [
+ 'Logger',
+ 'Agent',
+ 'Shim',
+ 'MessageShim',
+ 'TraceSegment',
+ 'Transaction',
+ 'Tracer',
+ 'Exception',
+ 'MetricAggregator',
+ 'EventEmitter'
+ ]
+ }
+ ]
+ }
+}
+const jsdocOverrides = {
+ files: [
+ './lib/shim/*.js',
+ 'lib/transaction/handle.js',
+ 'api.js'
+ ],
+ rules: {
+ 'jsdoc/require-jsdoc': 'warn'
+ }
+}
+
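+// Customize the eslint-plugin-n recommended flat config provided by neostandard:
+// drop its `sourceType` setting, require Node builtins available as of 18.8.0,
+// allow `process.exit()` and the deprecated `url.parse`, and skip the test suites.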
+const nodeRecommended = neostandard.plugins.n.configs['flat/recommended']
+delete nodeRecommended.languageOptions.sourceType
+nodeRecommended.rules['n/no-unsupported-features/node-builtins'] = ['error', { version: '>=18.8.0' }]
+nodeRecommended.rules['n/no-process-exit'] = 'off'
+nodeRecommended.rules['n/no-deprecated-api'] = [
+ 'error',
+ {
+ ignoreModuleItems: [
+ 'url.parse'
+ ]
+ }
+]
+nodeRecommended.ignores = testFiles
+
+// Configuration objects are merged in order. That is, the last object in the
+// list will merge with objects earlier in the list. This allows for overriding
+// any settings by adding objects to the end of the list.
+// See:
+ // + https://eslint.org/docs/latest/use/configure/configuration-files#cascading-configuration-objects
+// + https://eslint.org/blog/2022/08/new-config-system-part-2/#goodbye-extends%2C-hello-flat-cascade
+module.exports = [
+ // Apply baseline configuration.
+ ...neostandard(),
+
+ // Add sonarjs config:
+ sonarjs.configs.recommended,
+ sonarjsTestsConfig,
+ sonarjsOverrides,
+
+ // Add jsdoc config:
+ jsdoc.configs['flat/recommended'],
+ jsdocConfig,
+ jsdocOverrides,
+
+ // Add customized eslint-plugin-n recommended rules:
+ nodeRecommended,
+ {
+ files: [
+ 'bin/*.js'
+ ],
+ rules: {
+ 'n/hashbang': 'off'
+ }
+ },
+
+ // Apply local configuration and overrides:
+ localConfig,
+ newrelicConfigOverrides,
+ globalIgnores
+]
diff --git a/index.js b/index.js
index 426442093c..27cc2034e9 100644
--- a/index.js
+++ b/index.js
@@ -108,10 +108,8 @@ function initialize() {
message = 'New Relic for Node.js was unable to bootstrap itself due to an error:'
logger.error(error, message)
- /* eslint-disable no-console */
console.error(message)
console.error(error.stack)
- /* eslint-enable no-console */
}
const api = agent ? initApi({ agent, apiPath: 'api' }) : initApi({ apiPath: 'stub_api' })
@@ -172,10 +170,8 @@ function createAgent(config) {
const errorMessage = 'New Relic for Node.js halted startup due to an error:'
logger.error(error, errorMessage)
- /* eslint-disable no-console */
console.error(errorMessage)
console.error(error.stack)
- /* eslint-enable no-console */
return
}
diff --git a/lib/agent.js b/lib/agent.js
index 85d06cdcc9..f634a28064 100644
--- a/lib/agent.js
+++ b/lib/agent.js
@@ -635,7 +635,7 @@ Agent.prototype.reconfigure = function reconfigure(configuration) {
* @fires Agent#stopping
*/
Agent.prototype.setState = function setState(newState) {
- if (!STATES.hasOwnProperty(newState)) {
+ if (!Object.prototype.hasOwnProperty.call(STATES, newState)) {
throw new TypeError('Invalid state ' + newState)
}
@@ -719,6 +719,7 @@ Agent.prototype._addEventFromTransaction = function _addEventFromTransaction(tx)
const event = [intrinsicAttributes, userAttributes, agentAttributes]
+ // eslint-disable-next-line sonarjs/pseudo-random
this.transactionEventAggregator.add(event, tx.priority || Math.random())
}
diff --git a/lib/aggregators/base-aggregator.js b/lib/aggregators/base-aggregator.js
index cb73a03de3..fefecb52de 100644
--- a/lib/aggregators/base-aggregator.js
+++ b/lib/aggregators/base-aggregator.js
@@ -251,7 +251,7 @@ class Aggregator extends EventEmitter {
}
// This can be synchronous for the serverless collector.
- this.collector.send(this.method, payload, (error, response) => {
+ this.collector.send(this.method, payload, (_, response) => {
if (response && response.retainData) {
this._merge(data)
}
@@ -291,7 +291,7 @@ class Aggregator extends EventEmitter {
const data = this._getMergeData()
if (this.isAsync) {
- this._toPayload((err, payload) => {
+ this._toPayload((_, payload) => {
this._runSend(data, payload)
})
} else {
diff --git a/lib/aggregators/trace-aggregator.js b/lib/aggregators/trace-aggregator.js
index 628bd9277e..c787e2008e 100644
--- a/lib/aggregators/trace-aggregator.js
+++ b/lib/aggregators/trace-aggregator.js
@@ -7,10 +7,6 @@
const Aggregator = require('./base-aggregator')
-class TraceAggregator extends Aggregator {
- constructor(opts, collector, harvester) {
- super(opts, collector, harvester)
- }
-}
+class TraceAggregator extends Aggregator {}
module.exports = TraceAggregator
diff --git a/lib/attributes.js b/lib/attributes.js
index 5b6ec8205d..bd4fcd050d 100644
--- a/lib/attributes.js
+++ b/lib/attributes.js
@@ -22,7 +22,7 @@ class Attributes {
* @param {string} scope
* The scope of the attributes this will collect. Must be `transaction` or
* `segment`.
- * @param {number} [limit=Infinity]
+ * @param {number} [limit]
* The maximum number of attributes to retrieve for each destination.
*/
constructor(scope, limit = Infinity) {
@@ -64,9 +64,9 @@ class Attributes {
*/
get(dest) {
const attrs = Object.create(null)
- // eslint-disable-next-line guard-for-in
for (const key in this.attributes) {
const attr = this.attributes[key]
+ // eslint-disable-next-line sonarjs/bitwise-operators
if (!(attr.destinations & dest)) {
continue
}
@@ -104,7 +104,7 @@ class Attributes {
* @param {DESTINATIONS} destinations - The default destinations for this key.
* @param {string} key - The attribute name.
* @param {string} value - The attribute value.
- * @param {boolean} [truncateExempt=false] - Flag marking value exempt from truncation
+ * @param {boolean} [truncateExempt] - Flag marking value exempt from truncation
*/
addAttribute(destinations, key, value, truncateExempt = false) {
if (this.attributeCount + 1 > this.limit) {
@@ -180,6 +180,6 @@ function makeFilter(scope) {
}
module.exports = {
- Attributes: Attributes,
- MAXIMUM_CUSTOM_ATTRIBUTES: MAXIMUM_CUSTOM_ATTRIBUTES
+ Attributes,
+ MAXIMUM_CUSTOM_ATTRIBUTES
}
diff --git a/lib/collector/api.js b/lib/collector/api.js
index 4a33a9c9ae..c7a0a572d2 100644
--- a/lib/collector/api.js
+++ b/lib/collector/api.js
@@ -5,7 +5,6 @@
'use strict'
-const http = require('http')
const CollectorResponse = require('./response')
const facts = require('./facts')
const logger = require('../logger').child({ component: 'collector_api' })
@@ -341,7 +340,7 @@ CollectorAPI.prototype._handlePreConnectResponse = function _handlePreConnectRes
const [host, port] = parts
const newEndpoint = {
- host: host,
+ host,
port: port || DEFAULT_PORT
}
@@ -614,7 +613,6 @@ CollectorAPI.prototype._handleResponseCode = _handleResponseCode
function _handleResponseCode(response, endpoint, cb) {
const code = response.status
- /* eslint-disable padded-blocks */
if (SUCCESS.has(code)) {
// The request was a success!
setImmediate(cb, null, CollectorResponse.success(response.payload))
diff --git a/lib/collector/facts.js b/lib/collector/facts.js
index bced9d3420..035b5d6aed 100644
--- a/lib/collector/facts.js
+++ b/lib/collector/facts.js
@@ -37,7 +37,7 @@ async function facts(agent, callback, { logger = defaultLogger } = {}) {
metadata_version: 5,
logical_processors: systemInfo.logicalProcessors || null,
total_ram_mib: systemInfo.memory || null,
- hostname: hostname
+ hostname
},
pid: process.pid,
host: hostname,
@@ -45,7 +45,7 @@ async function facts(agent, callback, { logger = defaultLogger } = {}) {
language: 'nodejs',
app_name: agent.config.applications(),
agent_version: agent.version,
- environment: environment,
+ environment,
settings: agent.config.publicSettings(),
high_security: agent.config.high_security,
labels: agent.config.parsedLabels,
diff --git a/lib/collector/http-agents.js b/lib/collector/http-agents.js
index 7d45f7ccad..d49484fdff 100644
--- a/lib/collector/http-agents.js
+++ b/lib/collector/http-agents.js
@@ -27,7 +27,7 @@ exports.keepAliveAgent = function keepAliveAgent(config) {
// always return the same agent instance, which
// ensures all requests share the same http
// connection
- if (null !== agentKeepAlive) {
+ if (agentKeepAlive !== null) {
return agentKeepAlive
}
@@ -48,7 +48,7 @@ exports.keepAliveAgent = function keepAliveAgent(config) {
* @param config
*/
exports.proxyAgent = function proxyAgent(config) {
- if (null !== agentProxyWithKeepAlive) {
+ if (agentProxyWithKeepAlive !== null) {
return agentProxyWithKeepAlive
}
const proxyUrl = proxyOptions(config)
diff --git a/lib/collector/remote-method.js b/lib/collector/remote-method.js
index 897b366939..3e77168e62 100644
--- a/lib/collector/remote-method.js
+++ b/lib/collector/remote-method.js
@@ -92,7 +92,7 @@ RemoteMethod.prototype._reportDataUsage = function reportDataUsage(sent, receive
* you're doing it wrong.
*
* @param {object} payload Serializable payload.
- * @param {object} [nrHeaders=null] NR request headers from connect response.
+ * @param {object} [nrHeaders] NR request headers from connect response.
* @param {Function} callback What to do next. Gets passed any error.
*/
RemoteMethod.prototype.invoke = function invoke(payload, nrHeaders, callback) {
@@ -359,7 +359,7 @@ RemoteMethod.prototype._path = function _path({ redactLicenseKey } = {}) {
return url.format({
pathname: RAW_METHOD_PATH,
- query: query
+ query
})
}
@@ -374,9 +374,9 @@ RemoteMethod.prototype._headers = function _headers(options) {
const headers = {
// select the virtual host on the server end
- 'Host': this.endpoint.host,
+ Host: this.endpoint.host,
'User-Agent': agent,
- 'Connection': 'Keep-Alive',
+ Connection: 'Keep-Alive',
'Content-Length': byteLength(options.body),
'Content-Type': 'application/json'
}
diff --git a/lib/config/attribute-filter.js b/lib/config/attribute-filter.js
index 2cfa623e3e..af2dd2c074 100644
--- a/lib/config/attribute-filter.js
+++ b/lib/config/attribute-filter.js
@@ -347,7 +347,7 @@ function _convertRulesToRegex(rules) {
// 'fizz.bang' => ['fizz', 'bang']
// '*' => ['\\*']
return rule
- .replace(/([.*+?|\\^$()\[\]])/g, function cleaner(m) {
+ .replace(/([.*+?|\\^$()[\]])/g, function cleaner(m) {
return '\\' + m
})
.split('.')
diff --git a/lib/config/default.js b/lib/config/default.js
index f862697761..08896ac5cf 100644
--- a/lib/config/default.js
+++ b/lib/config/default.js
@@ -33,7 +33,7 @@ defaultConfig.definition = () => ({
*/
app_name: {
formatter(val) {
- return val.split(/;|,/).map((k) => k.trim())
+ return val.split(/[;,]/).map((k) => k.trim())
},
default: []
},
@@ -1589,7 +1589,7 @@ function buildConfig(definition, config, paths = [], objectKeys = 1) {
if (type === 'string') {
assignConfigValue({ config: conf, key, value, paths })
} else if (type === 'object') {
- if (value.hasOwnProperty('default')) {
+ if (Object.prototype.hasOwnProperty.call(value, 'default')) {
assignConfigValue({ config: conf, key, value: value.default, paths })
} else {
// add the current leaf node key to the paths and recurse through function again
diff --git a/lib/config/harvest-config-validator.js b/lib/config/harvest-config-validator.js
index 324e8608cf..ed1aefd543 100644
--- a/lib/config/harvest-config-validator.js
+++ b/lib/config/harvest-config-validator.js
@@ -24,6 +24,6 @@ function isValidHarvestValue(value) {
}
module.exports = {
- isValidHarvestConfig: isValidHarvestConfig,
- isValidHarvestValue: isValidHarvestValue
+ isValidHarvestConfig,
+ isValidHarvestValue
}
diff --git a/lib/config/index.js b/lib/config/index.js
index 93fa5e0e4b..7ce5befb38 100644
--- a/lib/config/index.js
+++ b/lib/config/index.js
@@ -593,7 +593,7 @@ Config.prototype._fromServer = function _fromServer(params, key) {
case 'rum.load_episodes_file':
// Ensure the most secure setting is applied to the settings below
// when enabling them.
- case 'attributes.include_enabled':
+ case 'attributes.include_enabled': // eslint-disable-line no-fallthrough
case 'strip_exception_messages.enabled':
case 'transaction_tracer.record_sql':
this.logUnsupported(params, key)
@@ -695,7 +695,7 @@ function _validateThenUpdateStatusCodes(remote, local, remoteKey, localKey) {
let valid = true
valueToTest.forEach(function validateArray(thingToTest) {
- if (!('string' === typeof thingToTest || 'number' === typeof thingToTest)) {
+ if (!(typeof thingToTest === 'string' || typeof thingToTest === 'number')) {
logger.warn(
'Saw SSC (ignore|expect)_status_code that is not a number or string,' +
'will not merge: %s',
@@ -740,7 +740,7 @@ function _validateThenUpdateErrorClasses(remote, local, remoteKey, localKey) {
let valid = true
Object.keys(valueToTest).forEach(function validateArray(key) {
const thingToTest = valueToTest[key]
- if ('string' !== typeof thingToTest) {
+ if (typeof thingToTest !== 'string') {
logger.warn(
'Saw SSC (ignore|expect)_class that is not a string, will not merge: %s',
thingToTest
@@ -783,7 +783,7 @@ function _validateThenUpdateErrorMessages(remote, local, remoteKey, localKey) {
return
}
- if ('object' !== typeof valueToTest) {
+ if (typeof valueToTest !== 'object') {
logger.warn(
'Saw SSC (ignore|expect)_message that is primitive/scaler, will not merge: %s',
valueToTest
@@ -986,7 +986,7 @@ function getHostnameSafe() {
_hostname = os.hostname()
}
return _hostname
- } catch (e) {
+ } catch {
const addresses = this.getIPAddresses()
if (this.process_host.ipv_preference === '6' && addresses.ipv6) {
@@ -1054,7 +1054,7 @@ Config.prototype._fromPassed = function _fromPassed(external, internal, arbitrar
let node = null
try {
node = external[key]
- } catch (err) {
+ } catch {
logger.warn('Error thrown on access of user config for key: %s', key)
return
}
@@ -1160,7 +1160,7 @@ Config.prototype._fromEnvironment = function _fromEnvironment(
const envVar = deriveEnvVar(key, paths)
setFromEnv({ config, key, envVar, paths })
} else if (type === 'object') {
- if (value.hasOwnProperty('env')) {
+ if (Object.prototype.hasOwnProperty.call(value, 'env')) {
setFromEnv({
config,
key,
@@ -1168,7 +1168,7 @@ Config.prototype._fromEnvironment = function _fromEnvironment(
paths,
formatter: value.formatter
})
- } else if (value.hasOwnProperty('default')) {
+ } else if (Object.prototype.hasOwnProperty.call(value, 'default')) {
const envVar = deriveEnvVar(key, paths)
setFromEnv({ config, key, envVar, formatter: value.formatter, paths })
} else {
@@ -1659,7 +1659,7 @@ Config.prototype.publicSettings = function publicSettings() {
let settings = Object.create(null)
for (const key in this) {
- if (this.hasOwnProperty(key) && !REMOVE_BEFORE_SEND.has(key)) {
+ if (Object.prototype.hasOwnProperty.call(this, key) && !REMOVE_BEFORE_SEND.has(key)) {
if (HSM.REDACT_BEFORE_SEND.has(key)) {
const value = this[key]
settings[key] = redactValue(value)
@@ -1825,10 +1825,8 @@ function getOrCreateInstance() {
try {
_configInstance = initialize()
} catch (err) {
- /* eslint-disable no-console */
console.error('New Relic for Node.js is disabled due to an error:')
console.error(err.stack)
- /* eslint-enable no-console */
// Config construction has potential to throw due to invalid settings.
// This allows the agent to return a stub api without crashing the process.
diff --git a/lib/config/merge-server-config.js b/lib/config/merge-server-config.js
index ec501d2044..1b8c8bd46f 100644
--- a/lib/config/merge-server-config.js
+++ b/lib/config/merge-server-config.js
@@ -8,7 +8,6 @@
const { isSimpleObject } = require('../util/objects')
class MergeServerConfig {
- // eslint-disable-next-line max-params
updateNestedIfChanged(config, remote, local, remoteKey, localKey, logger) {
const value = remote[remoteKey]
@@ -46,7 +45,7 @@ class MergeServerConfig {
if (Array.isArray(local[localKey][element]) && Array.isArray(value[element])) {
// if both key-values are arrays, push the remote value onto the local array
value[element].forEach((elementValue) => {
- if (-1 === local[localKey][element].indexOf(elementValue)) {
+ if (local[localKey][element].indexOf(elementValue) === -1) {
local[localKey][element].push(elementValue)
}
})
diff --git a/lib/custom-events/custom-event-aggregator.js b/lib/custom-events/custom-event-aggregator.js
index 776f74cf22..575220c67b 100644
--- a/lib/custom-events/custom-event-aggregator.js
+++ b/lib/custom-events/custom-event-aggregator.js
@@ -12,7 +12,6 @@ class CustomEventAggregator extends EventAggregator {
constructor(opts, agent) {
opts = opts || {}
opts.method = opts.method || 'custom_event_data'
- opts.metricNames = opts.metricNames
super(opts, agent)
}
diff --git a/lib/db/query-parsers/sql.js b/lib/db/query-parsers/sql.js
index c728704dbc..d471b68c9b 100644
--- a/lib/db/query-parsers/sql.js
+++ b/lib/db/query-parsers/sql.js
@@ -9,6 +9,7 @@ const logger = require('../../logger').child({ component: 'sql_query_parser' })
const StatementMatcher = require('../statement-matcher')
const stringify = require('json-stringify-safe')
+/* eslint-disable no-useless-escape, sonarjs/slow-regex, sonarjs/duplicates-in-character-class */
const OPERATIONS = [
new StatementMatcher(
'select',
@@ -22,6 +23,7 @@ const OPERATIONS = [
new StatementMatcher('delete', /^[^\S]*?delete[^\S]+?from[^\S]+([^\s\n\r,(;]+)/gim)
]
const COMMENT_PATTERN = /\/\\*.*?\\*\//g
+/* eslint-enable no-useless-escape, sonarjs/slow-regex, sonarjs/duplicates-in-character-class */
// This must be called synchronously after the initial db call for backtraces to
// work correctly
diff --git a/lib/db/slow-query.js b/lib/db/slow-query.js
index c80bcf881a..2b460d1168 100644
--- a/lib/db/slow-query.js
+++ b/lib/db/slow-query.js
@@ -23,6 +23,7 @@ function SlowQuery(segment, type, query, trace) {
function normalizedHash(value) {
// We leverage the last 15 hex digits which will fit in a signed long
+ // eslint-disable-next-line sonarjs/hashing
return parseInt(crypto.createHash('sha1').update(value).digest('hex').slice(-15), 16)
}
diff --git a/lib/db/statement-matcher.js b/lib/db/statement-matcher.js
index 1e79a38ef1..495d8cdaa0 100644
--- a/lib/db/statement-matcher.js
+++ b/lib/db/statement-matcher.js
@@ -6,6 +6,7 @@
'use strict'
// ( ` database` . ` table ` )
+// eslint-disable-next-line sonarjs/slow-regex
const CLEANER = /^\(?(?:([`'"]?)(.*?)\1\.)?([`'"]?)(.*?)\3\)?$/
function StatementMatcher(operation, operationPattern) {
@@ -42,8 +43,8 @@ StatementMatcher.prototype.getParsedStatement = function getParsedStatement(sql)
// be used for datastore attributes.
return {
operation: this.operation,
- database: database,
- collection: collection,
+ database,
+ collection,
query: sql
}
}
diff --git a/lib/db/utils.js b/lib/db/utils.js
index 64871ec7db..af722bb18b 100644
--- a/lib/db/utils.js
+++ b/lib/db/utils.js
@@ -10,6 +10,10 @@ module.exports.extractDatabaseChangeFromUse = extractDatabaseChangeFromUse
function extractDatabaseChangeFromUse(sql) {
// The character ranges for this were pulled from
// http://dev.mysql.com/doc/refman/5.7/en/identifiers.html
+
+ // Suppressing a warning on this regex because it is not obvious what this
+ // regex does, and we don't want to break anything.
+ // eslint-disable-next-line sonarjs/slow-regex, sonarjs/duplicates-in-character-class
const match = /^\s*use[^\w`]+([\w$_\u0080-\uFFFF]+|`[^`]+`)[\s;]*$/i.exec(sql)
return (match && match[1]) || null
}
diff --git a/lib/environment.js b/lib/environment.js
index 694e679b45..3bf34f9ad8 100644
--- a/lib/environment.js
+++ b/lib/environment.js
@@ -67,7 +67,7 @@ function clearSetting(name) {
* the provided root.
*
* @param {string} root - Path to start listing packages from.
- * @param {Array} [packages=[]] - Array to append found packages to.
+ * @param {Array} [packages] - Array to append found packages to.
*/
async function listPackages(root, packages = []) {
_log('Listing packages in %s', root)
@@ -496,7 +496,7 @@ async function getJSON() {
_log('Getting environment JSON')
try {
await refresh()
- } catch (err) {
+ } catch {
// swallow error
}
diff --git a/lib/errors/error-collector.js b/lib/errors/error-collector.js
index 70ecf319c1..210cafdce4 100644
--- a/lib/errors/error-collector.js
+++ b/lib/errors/error-collector.js
@@ -10,7 +10,6 @@ const errorsModule = require('./index')
const logger = require('../logger').child({ component: 'error_tracer' })
const urltils = require('../util/urltils')
const Exception = require('../errors').Exception
-const Transaction = require('../transaction')
const errorHelper = require('./helper')
const createError = errorsModule.createError
const createEvent = errorsModule.createEvent
@@ -312,6 +311,7 @@ class ErrorCollector {
}
if (this.config.error_collector.capture_events === true) {
+ // eslint-disable-next-line sonarjs/pseudo-random
const priority = (transaction && transaction.priority) || Math.random()
const event = createEvent(transaction, errorTrace, exception.timestamp, this.config)
this.eventAggregator.add(event, priority)
@@ -360,7 +360,7 @@ class ErrorCollector {
* @param {Transaction} transaction the transaction associated with the trace
*/
_maybeRecordErrorMetrics(errorTrace, transaction) {
- const isExpectedError = true === errorTrace[4].intrinsics['error.expected']
+ const isExpectedError = errorTrace[4].intrinsics['error.expected'] === true
if (isExpectedError) {
this.metrics.getOrCreateMetric(NAMES.ERRORS.EXPECTED).incrementCallCount()
diff --git a/lib/errors/error-event-aggregator.js b/lib/errors/error-event-aggregator.js
index fe8bcf0265..3743dce7ab 100644
--- a/lib/errors/error-event-aggregator.js
+++ b/lib/errors/error-event-aggregator.js
@@ -36,6 +36,7 @@ class ErrorEventAggregator extends EventAggregator {
return [this.runId, metrics, eventData]
}
+
reconfigure(config) {
super.reconfigure(config)
if (!this.enabled) {
diff --git a/lib/errors/helper.js b/lib/errors/helper.js
index 8a3ff7cf37..eeead080d5 100644
--- a/lib/errors/helper.js
+++ b/lib/errors/helper.js
@@ -90,9 +90,9 @@ module.exports = {
}
return {
- name: name,
- message: message,
- type: type
+ name,
+ message,
+ type
}
},
@@ -119,7 +119,7 @@ module.exports = {
return false
}
- return -1 !== config.error_collector.ignore_classes.indexOf(errorInfo.type)
+ return config.error_collector.ignore_classes.indexOf(errorInfo.type) !== -1
},
shouldIgnoreErrorMessage: function shouldIgnoreErrorMessage(errorInfo, config) {
diff --git a/lib/errors/index.js b/lib/errors/index.js
index 5f6b4af510..5655a71326 100644
--- a/lib/errors/index.js
+++ b/lib/errors/index.js
@@ -16,7 +16,6 @@ const {
maybeAddDatabaseAttributes
} = require('../util/attributes')
const synthetics = require('../synthetics')
-const Transaction = require('../transaction')
const ERROR_EXPECTED_PATH = 'error.expected'
class Exception {
@@ -121,8 +120,8 @@ function isValidErrorGroupOutput(output) {
function maybeAddAgentAttributes(attributes, exception) {
if (exception.errorGroupCallback) {
const callbackInput = {
- 'error': exception.error,
- 'customAttributes': Object.assign({}, attributes.userAttributes),
+ error: exception.error,
+ customAttributes: Object.assign({}, attributes.userAttributes),
'request.uri': attributes.agentAttributes['request.uri'],
'http.statusCode': attributes.agentAttributes['http.statusCode'],
'http.method': attributes.agentAttributes['request.method'],
@@ -153,6 +152,7 @@ function maybeAddUserAttributes(userAttributes, exception, config) {
for (const key in customAttributes) {
if (props.hasOwn(customAttributes, key)) {
const dest = config.attributeFilter.filterTransaction(DESTINATIONS.ERROR_EVENT, key)
+ // eslint-disable-next-line sonarjs/bitwise-operators
if (dest & DESTINATIONS.ERROR_EVENT) {
userAttributes[key] = customAttributes[key]
}
@@ -208,7 +208,6 @@ function createEvent(transaction, error, timestamp, config) {
return [intrinsicAttributes, userAttributes, agentAttributes]
}
-// eslint-disable-next-line max-params
function _getErrorEventIntrinsicAttrs(transaction, errorClass, message, expected, timestamp, conf) {
// the server expects seconds instead of milliseconds
if (timestamp) {
@@ -216,10 +215,10 @@ function _getErrorEventIntrinsicAttrs(transaction, errorClass, message, expected
}
const attributes = {
- 'type': 'TransactionError',
+ type: 'TransactionError',
'error.class': errorClass,
'error.message': conf.high_security ? '' : message,
- 'timestamp': timestamp,
+ timestamp,
'error.expected': expected
}
diff --git a/lib/grpc/connection.js b/lib/grpc/connection.js
index 29579a78d5..49929b4340 100644
--- a/lib/grpc/connection.js
+++ b/lib/grpc/connection.js
@@ -43,7 +43,7 @@ class GrpcConnection extends EventEmitter {
*
* @param {object} infiniteTracingConfig config item config.infinite_tracing
* @param {MetricAggregator} metrics metric aggregator, for supportability metrics
- * @param {number} [reconnectDelayMs=15000] number of milliseconds to wait before reconnecting
+ * @param {number} [reconnectDelayMs] number of milliseconds to wait before reconnecting
* for error states that require a reconnect delay.
*/
constructor(infiniteTracingConfig, metrics, reconnectDelayMs = DEFAULT_RECONNECT_DELAY_MS) {
diff --git a/lib/header-attributes.js b/lib/header-attributes.js
index 9e12d086b2..9a93a57908 100644
--- a/lib/header-attributes.js
+++ b/lib/header-attributes.js
@@ -17,42 +17,42 @@ const COLLECTED_REQUEST_HEADERS = [
]
const HEADER_ATTR_NAMES = {
- 'accept': 'accept',
+ accept: 'accept',
'accept-charset': 'acceptCharset',
'accept-encoding': 'acceptEncoding',
'access-control-allow-headers': 'accessControlAllowHeaders',
'access-control-allow-methods': 'accessControlAllowMethods',
'access-control-allow-origin': 'accessControlAllowOrigin',
- 'age': 'age',
- 'allow': 'allow',
- 'authorization': 'authorization',
+ age: 'age',
+ allow: 'allow',
+ authorization: 'authorization',
'cache-control': 'cacheControl',
- 'connection': 'connection',
- 'cookie': 'cookie',
+ connection: 'connection',
+ cookie: 'cookie',
'content-encoding': 'contentEncoding',
'content-length': 'contentLength',
'content-type': 'contentType',
- 'date': 'date',
- 'etag': 'eTag',
- 'expect': 'expect',
- 'expires': 'expires',
- 'forwarded': 'forwarded',
- 'host': 'host',
+ date: 'date',
+ etag: 'eTag',
+ expect: 'expect',
+ expires: 'expires',
+ forwarded: 'forwarded',
+ host: 'host',
'if-match': 'ifMatch',
'if-modified-since': 'ifModifiedSince',
'last-modified': 'lastModified',
- 'location': 'location',
- 'newrelic': 'newrelic',
- 'origin': 'origin',
+ location: 'location',
+ newrelic: 'newrelic',
+ origin: 'origin',
'proxy-authorization': 'proxyAuthorization',
- 'referer': 'referer',
- 'refresh': 'refresh',
- 'server': 'server',
+ referer: 'referer',
+ refresh: 'refresh',
+ server: 'server',
'set-cookie': 'setCookie',
'transfer-encoding': 'transferEncoding',
'user-agent': 'userAgent',
- 'upgrade': 'upgrade',
- 'vary': 'vary',
+ upgrade: 'upgrade',
+ vary: 'vary',
'x-correlation-id': 'xCorrelationId',
'x-csrf-token': 'xCsrfToken',
'x-forwarded-for': 'xForwardedFor',
@@ -95,6 +95,7 @@ function _headerToCamelCase(header) {
const newHeader = header.charAt(0).toLowerCase() + header.slice(1)
// Converts headers in the form 'header-name' to be in the form 'headerName'
+ // eslint-disable-next-line sonarjs/slow-regex
return newHeader.replace(/[\W_]+(\w)/g, function capitalize(m, $1) {
return $1.toUpperCase()
})
diff --git a/lib/instrumentation/@elastic/elasticsearch.js b/lib/instrumentation/@elastic/elasticsearch.js
index bc88e2a417..dc2976acbe 100644
--- a/lib/instrumentation/@elastic/elasticsearch.js
+++ b/lib/instrumentation/@elastic/elasticsearch.js
@@ -108,7 +108,7 @@ function parsePath(pathString, method) {
const path = pathString.split('/')
if (method === 'PUT' && path.length === 2) {
collection = path?.[1] || defaultCollection
- operation = `index.create`
+ operation = 'index.create'
return { collection, operation }
}
path.forEach((segment, idx) => {
@@ -116,7 +116,7 @@ function parsePath(pathString, method) {
let opname
if (segment === '_search') {
collection = path?.[prev] || defaultCollection
- operation = `search`
+ operation = 'search'
} else if (segment[0] === '_') {
opname = segment.substring(1)
collection = path?.[prev] || defaultCollection
diff --git a/lib/instrumentation/@opensearch-project/opensearch.js b/lib/instrumentation/@opensearch-project/opensearch.js
index b1593114de..12fb670dbc 100644
--- a/lib/instrumentation/@opensearch-project/opensearch.js
+++ b/lib/instrumentation/@opensearch-project/opensearch.js
@@ -112,7 +112,7 @@ function parsePath(pathString, method) {
const path = pathString.split('/')
if (method === 'PUT' && path.length === 2) {
collection = path?.[1] || defaultCollection
- operation = `index.create`
+ operation = 'index.create'
return { collection, operation }
}
path.forEach((segment, idx) => {
@@ -120,7 +120,7 @@ function parsePath(pathString, method) {
let opname
if (segment === '_search') {
collection = path?.[prev] || defaultCollection
- operation = `search`
+ operation = 'search'
} else if (segment[0] === '_') {
opname = segment.substring(1)
collection = path?.[prev] || defaultCollection
diff --git a/lib/instrumentation/@prisma/client.js b/lib/instrumentation/@prisma/client.js
index fd5a86fe3d..f72ff86cfb 100644
--- a/lib/instrumentation/@prisma/client.js
+++ b/lib/instrumentation/@prisma/client.js
@@ -84,9 +84,10 @@ function retrieveQuery(args, pkgVersion) {
return extractQueryArgs(args, pkgVersion)
}
- // cast to string obj to attach symbol
- // this is done to tell query parser that we need to split string
- // to extract contents
+ // Cast to string obj to attach symbol. It _must_ be a `String` instance.
+ // This is done to tell the query parser that we need to split the string
+ // to extract contents.
+ // eslint-disable-next-line no-new-wrappers, sonarjs/no-primitive-wrappers
const clientMethod = new String(args[0].clientMethod)
clientMethod[prismaModelCall] = true
return clientMethod
diff --git a/lib/instrumentation/amqplib/utils.js b/lib/instrumentation/amqplib/utils.js
index 1f1032b2e9..c47baacd74 100644
--- a/lib/instrumentation/amqplib/utils.js
+++ b/lib/instrumentation/amqplib/utils.js
@@ -30,7 +30,7 @@ function describeMessage({ host, port }) {
const [message] = args
if (!message?.properties) {
- shim.logger.debug({ message: message }, 'Failed to find message in consume arguments.')
+ shim.logger.debug({ message }, 'Failed to find message in consume arguments.')
return null
}
diff --git a/lib/instrumentation/aws-sdk/v3/common.js b/lib/instrumentation/aws-sdk/v3/common.js
index 71e59d2553..cc6dbaedb8 100644
--- a/lib/instrumentation/aws-sdk/v3/common.js
+++ b/lib/instrumentation/aws-sdk/v3/common.js
@@ -34,9 +34,7 @@ function headerMiddleware(shim, config, next) {
* @param {Shim} shim
* @param {Object} config AWS command configuration
* @param {function} next next function in middleware chain
- * @param {Object} contxt AWS command context
- * cons
- * @param context
+ * @param {Object} context AWS command context
* @returns {function}
*/
function attrMiddleware(shim, config, next, context) {
@@ -46,11 +44,10 @@ function attrMiddleware(shim, config, next, context) {
region = await config.region()
} catch (err) {
shim.logger.debug(err, 'Failed to get the AWS region')
- } finally {
- const result = await next(args)
- addAwsAttributes({ result, config, region, shim, context })
- return result
}
+ const result = await next(args)
+ addAwsAttributes({ result, config, region, shim, context })
+ return result
}
}
diff --git a/lib/instrumentation/aws-sdk/v3/lambda.js b/lib/instrumentation/aws-sdk/v3/lambda.js
index 478c6be046..0b81887f92 100644
--- a/lib/instrumentation/aws-sdk/v3/lambda.js
+++ b/lib/instrumentation/aws-sdk/v3/lambda.js
@@ -31,13 +31,12 @@ function resourceIdMiddleware(shim, config, next) {
'cloud.resource_id',
`arn:aws:lambda:${region}:${accountId}:function:${functionName}`
)
- segment.addAttribute('cloud.platform', `aws_lambda`)
+ segment.addAttribute('cloud.platform', 'aws_lambda')
}
} catch (err) {
shim.logger.debug(err, 'Failed to add AWS cloud resource id to segment')
- } finally {
- return result
}
+ return result
}
}
diff --git a/lib/instrumentation/bluebird.js b/lib/instrumentation/bluebird.js
index 36e86009c3..5b520787cf 100644
--- a/lib/instrumentation/bluebird.js
+++ b/lib/instrumentation/bluebird.js
@@ -13,6 +13,7 @@
// http://bluebirdjs.com/docs/api/cancellation.html
module.exports = function initialize(agent, bluebird, moduleName, shim) {
+ // eslint-disable-next-line sonarjs/no-globals-shadowing
const Promise = bluebird.Promise
const proto = Promise && Promise.prototype
if (!proto) {
diff --git a/lib/instrumentation/connect.js b/lib/instrumentation/connect.js
index 4ee1eaf685..b15969a0b1 100644
--- a/lib/instrumentation/connect.js
+++ b/lib/instrumentation/connect.js
@@ -38,7 +38,7 @@ module.exports = function initialize(agent, connect, moduleName, shim) {
function wrapMiddleware(shim, middleware, name, route) {
const spec = new MiddlewareSpec({
matchArity: true,
- route: route,
+ route,
type: shim.MIDDLEWARE,
next: shim.LAST,
req: shim.FIRST
diff --git a/lib/instrumentation/core/http-outbound.js b/lib/instrumentation/core/http-outbound.js
index fb3d2c4918..2cf58c81fa 100644
--- a/lib/instrumentation/core/http-outbound.js
+++ b/lib/instrumentation/core/http-outbound.js
@@ -13,7 +13,6 @@ const shimmer = require('../../shimmer')
const url = require('url')
const copy = require('../../util/copy')
const symbols = require('../../symbols')
-const http = require('http')
const synthetics = require('../../synthetics')
const { URL } = require('node:url')
@@ -200,7 +199,7 @@ function instrumentRequest({ agent, opts, makeRequest, host, port, hostname }, s
// TODO: abstract header logic shared with TransactionShim#insertCATRequestHeaders
function maybeAddDtCatHeaders(agent, transaction, outboundHeaders, headers = {}) {
if (agent.config.distributed_tracing.enabled) {
- if (!!(headers[symbols.disableDT] || headers['x-new-relic-disable-dt'])) {
+ if (headers[symbols.disableDT] || headers['x-new-relic-disable-dt']) {
logger.trace('Distributed tracing disabled by instrumentation.')
// do not try to delete this header because AWS will fail with signature fail
// See: https://github.com/newrelic/node-newrelic/issues/1549
diff --git a/lib/instrumentation/core/http.js b/lib/instrumentation/core/http.js
index 549f87714c..4d256c93b7 100644
--- a/lib/instrumentation/core/http.js
+++ b/lib/instrumentation/core/http.js
@@ -74,6 +74,7 @@ function wrapEmitWithTransaction(agent, emit, isHTTPS) {
tracer.bindEmitter(response, segment)
// the error tracer needs a URL for tracing, even though naming overwrites
+
transaction.parsedUrl = url.parse(request.url, true)
transaction.url = urltils.obfuscatePath(agent.config, transaction.parsedUrl.pathname)
transaction.verb = request.method
@@ -207,7 +208,7 @@ function storeTxInfo(transaction, request, response) {
}
const txInfo = {
- transaction: transaction,
+ transaction,
segmentStack: [],
errorHandled: false,
error: null
@@ -441,7 +442,6 @@ module.exports = function initialize(agent, http, moduleName) {
// FIXME: will this ever not be called?
shimmer.wrapMethod(http, 'http', 'createServer', function wrapMethod(createServer) {
- // eslint-disable-next-line no-unused-vars
return function setDispatcher(requestListener) {
agent.environment.setDispatcher('http')
return createServer.apply(this, arguments)
diff --git a/lib/instrumentation/core/timers.js b/lib/instrumentation/core/timers.js
index 1de2aa0d03..ce4447e367 100644
--- a/lib/instrumentation/core/timers.js
+++ b/lib/instrumentation/core/timers.js
@@ -7,7 +7,6 @@
const { RecorderSpec } = require('../../shim/specs')
const symbols = require('../../symbols')
-const Timers = require('timers')
module.exports = initialize
diff --git a/lib/instrumentation/express.js b/lib/instrumentation/express.js
index f245834218..3cb6c94849 100644
--- a/lib/instrumentation/express.js
+++ b/lib/instrumentation/express.js
@@ -144,7 +144,7 @@ function wrapResponse(shim, response) {
function wrapMiddleware(shim, middleware, name, route) {
let method = null
const spec = new MiddlewareSpec({
- route: route,
+ route,
type: shim.MIDDLEWARE,
matchArity: true,
req: shim.FIRST
diff --git a/lib/instrumentation/kafkajs/consumer.js b/lib/instrumentation/kafkajs/consumer.js
index c8a71ada44..7e1eb1b708 100644
--- a/lib/instrumentation/kafkajs/consumer.js
+++ b/lib/instrumentation/kafkajs/consumer.js
@@ -140,7 +140,7 @@ function handler({ consumer }) {
}
return new MessageSpec({
- destinationType: `Topic/Consume`,
+ destinationType: 'Topic/Consume',
destinationName: data?.topic,
headers: data?.message?.headers
})
diff --git a/lib/instrumentation/mysql/mysql.js b/lib/instrumentation/mysql/mysql.js
index 432a878ecc..5f65e77f79 100644
--- a/lib/instrumentation/mysql/mysql.js
+++ b/lib/instrumentation/mysql/mysql.js
@@ -243,8 +243,8 @@ function extractQueryArgs(shim, args) {
}
return {
- query: query,
- callback: callback
+ query,
+ callback
}
}
diff --git a/lib/instrumentation/openai.js b/lib/instrumentation/openai.js
index 36e4487057..d159669224 100644
--- a/lib/instrumentation/openai.js
+++ b/lib/instrumentation/openai.js
@@ -180,7 +180,7 @@ function instrumentStream({ agent, shim, request, response, segment }) {
}
shim.wrap(response, 'iterator', function wrapIterator(shim, orig) {
- return async function* wrappedIterator() {
+ return async function * wrappedIterator() {
let content = ''
let role = ''
let chunk
@@ -218,7 +218,6 @@ function instrumentStream({ agent, shim, request, response, segment }) {
})
}
-/* eslint-disable sonarjs/cognitive-complexity */
module.exports = function initialize(agent, openai, moduleName, shim) {
if (shouldSkipInstrumentation(agent.config, shim)) {
shim.logger.debug(
diff --git a/lib/instrumentation/undici.js b/lib/instrumentation/undici.js
index cda18b0942..fca5d17bb7 100644
--- a/lib/instrumentation/undici.js
+++ b/lib/instrumentation/undici.js
@@ -10,7 +10,9 @@ const recordExternal = require('../metrics/recorders/http_external')
const logger = require('../logger').child({ component: 'undici' })
const NAMES = require('../metrics/names')
const symbols = require('../symbols')
+// eslint-disable-next-line n/no-unsupported-features/node-builtins
const { executionAsyncResource } = require('async_hooks')
+// eslint-disable-next-line n/no-unsupported-features/node-builtins
const diagnosticsChannel = require('diagnostics_channel')
const synthetics = require('../synthetics')
const urltils = require('../util/urltils')
@@ -100,7 +102,6 @@ function addDTHeaders({ transaction, config, request }) {
logger.trace('Both DT and CAT are disabled, not adding headers!')
}
- // eslint-disable-next-line guard-for-in
for (const key in outboundHeaders) {
request.addHeader(key, outboundHeaders[key])
}
diff --git a/lib/instrumentation/when/contextualizer.js b/lib/instrumentation/when/contextualizer.js
index ed95351e1c..f1050fc0a5 100644
--- a/lib/instrumentation/when/contextualizer.js
+++ b/lib/instrumentation/when/contextualizer.js
@@ -140,7 +140,8 @@ Contextualizer.prototype.getSegment = function getSegment() {
* @returns {object} same segment passed in
*/
Contextualizer.prototype.setSegment = function setSegment(segment) {
- return (this.context.segments[this.idx] = segment)
+ this.context.segments[this.idx] = segment
+ return this.context.segments[this.idx]
}
/**
diff --git a/lib/instrumentation/when/index.js b/lib/instrumentation/when/index.js
index ddace24ee9..daa4216b87 100644
--- a/lib/instrumentation/when/index.js
+++ b/lib/instrumentation/when/index.js
@@ -27,6 +27,7 @@ module.exports = function initialize(shim, when) {
wrapStaticMethods(when, spec.name, spec.$library)
// Wrap prototype methods.
+ // eslint-disable-next-line sonarjs/no-globals-shadowing
const Promise = when[spec.constructor]
wrapPrototype(Promise.prototype)
wrapStaticMethods(Promise, spec.constructor, spec.$static)
@@ -78,7 +79,7 @@ module.exports = function initialize(shim, when) {
*/
function wrappedPromise(executor) {
if (!(this instanceof wrappedPromise)) {
- return Promise(executor) // eslint-disable-line new-cap
+ return new Promise(executor)
}
const parent = agent.tracer.getSegment()
@@ -287,6 +288,7 @@ module.exports = function initialize(shim, when) {
ret = agent.tracer.bindFunction(fn, promSegment, true).apply(this, arguments)
} finally {
if (ret && typeof ret.then === 'function') {
+ // eslint-disable-next-line sonarjs/no-dead-store
ret = ctx.next[symbols.context].continue(ret)
}
}
diff --git a/lib/instrumentations.js b/lib/instrumentations.js
index 47dbfb0aea..1961000c27 100644
--- a/lib/instrumentations.js
+++ b/lib/instrumentations.js
@@ -19,32 +19,32 @@ module.exports = function instrumentations() {
'@node-redis/client': { type: InstrumentationDescriptor.TYPE_DATASTORE },
'@prisma/client': { type: InstrumentationDescriptor.TYPE_DATASTORE },
'@redis/client': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'amqplib': { module: './instrumentation/amqplib' },
+ amqplib: { module: './instrumentation/amqplib' },
'aws-sdk': { module: './instrumentation/aws-sdk' },
- 'bluebird': { type: InstrumentationDescriptor.TYPE_PROMISE },
- 'bunyan': { type: InstrumentationDescriptor.TYPE_GENERIC },
+ bluebird: { type: InstrumentationDescriptor.TYPE_PROMISE },
+ bunyan: { type: InstrumentationDescriptor.TYPE_GENERIC },
'cassandra-driver': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'connect': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
- 'express': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
- 'fastify': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
+ connect: { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
+ express: { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
+ fastify: { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
'generic-pool': { type: InstrumentationDescriptor.TYPE_GENERIC },
- 'ioredis': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'kafkajs': { type: InstrumentationDescriptor.TYPE_MESSAGE },
- 'koa': { module: './instrumentation/koa' },
- 'langchain': { module: './instrumentation/langchain' },
- 'memcached': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'mongodb': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'mysql': { module: './instrumentation/mysql' },
- 'next': { module: './instrumentation/nextjs' },
- 'openai': { type: InstrumentationDescriptor.TYPE_GENERIC },
- 'pg': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'pino': { module: './instrumentation/pino' },
- 'q': { type: null },
- 'redis': { type: InstrumentationDescriptor.TYPE_DATASTORE },
- 'restify': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
- 'superagent': { type: InstrumentationDescriptor.TYPE_GENERIC },
- 'when': { module: './instrumentation/when' },
- 'winston': { type: InstrumentationDescriptor.TYPE_GENERIC },
+ ioredis: { type: InstrumentationDescriptor.TYPE_DATASTORE },
+ kafkajs: { type: InstrumentationDescriptor.TYPE_MESSAGE },
+ koa: { module: './instrumentation/koa' },
+ langchain: { module: './instrumentation/langchain' },
+ memcached: { type: InstrumentationDescriptor.TYPE_DATASTORE },
+ mongodb: { type: InstrumentationDescriptor.TYPE_DATASTORE },
+ mysql: { module: './instrumentation/mysql' },
+ next: { module: './instrumentation/nextjs' },
+ openai: { type: InstrumentationDescriptor.TYPE_GENERIC },
+ pg: { type: InstrumentationDescriptor.TYPE_DATASTORE },
+ pino: { module: './instrumentation/pino' },
+ q: { type: null },
+ redis: { type: InstrumentationDescriptor.TYPE_DATASTORE },
+ restify: { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK },
+ superagent: { type: InstrumentationDescriptor.TYPE_GENERIC },
+ when: { module: './instrumentation/when' },
+ winston: { type: InstrumentationDescriptor.TYPE_GENERIC },
/**
* The modules below are listed here purely to take
@@ -58,8 +58,8 @@ module.exports = function instrumentations() {
'@azure/openai': { type: InstrumentationDescriptor.TYPE_TRACKING },
'@langchain/community/llms/bedrock': { type: InstrumentationDescriptor.TYPE_TRACKING },
'fancy-log': { type: InstrumentationDescriptor.TYPE_TRACKING },
- 'knex': { type: InstrumentationDescriptor.TYPE_TRACKING },
- 'loglevel': { type: InstrumentationDescriptor.TYPE_TRACKING },
- 'npmlog': { type: InstrumentationDescriptor.TYPE_TRACKING }
+ knex: { type: InstrumentationDescriptor.TYPE_TRACKING },
+ loglevel: { type: InstrumentationDescriptor.TYPE_TRACKING },
+ npmlog: { type: InstrumentationDescriptor.TYPE_TRACKING }
}
}
diff --git a/lib/llm-events/aws-bedrock/bedrock-response.js b/lib/llm-events/aws-bedrock/bedrock-response.js
index 0d5ec61319..7b645cfc06 100644
--- a/lib/llm-events/aws-bedrock/bedrock-response.js
+++ b/lib/llm-events/aws-bedrock/bedrock-response.js
@@ -30,7 +30,6 @@ class BedrockResponse {
#completions = []
#id
- /* eslint-disable sonarjs/cognitive-complexity */
/**
* @param {object} params
* @param {AwsBedrockMiddlewareResponse} params.response
@@ -76,7 +75,6 @@ class BedrockResponse {
this.#completions = body.results?.map((r) => r.outputText) ?? []
}
}
- /* eslint-enable sonarjs/cognitive-complexity */
/**
* The prompt responses returned by the model.
diff --git a/lib/llm-events/aws-bedrock/chat-completion-summary.js b/lib/llm-events/aws-bedrock/chat-completion-summary.js
index 20c4edd6c2..36492a0e73 100644
--- a/lib/llm-events/aws-bedrock/chat-completion-summary.js
+++ b/lib/llm-events/aws-bedrock/chat-completion-summary.js
@@ -10,6 +10,7 @@ const LlmEvent = require('./event')
/**
* @typedef {object} LlmChatCompletionSummaryParams
* @augments LlmEventParams
+ * @property
*/
/**
* @type {LlmChatCompletionSummaryParams}
diff --git a/lib/llm-events/aws-bedrock/embedding.js b/lib/llm-events/aws-bedrock/embedding.js
index 3cfa9e3b88..a1eec6e1bf 100644
--- a/lib/llm-events/aws-bedrock/embedding.js
+++ b/lib/llm-events/aws-bedrock/embedding.js
@@ -10,6 +10,7 @@ const LlmEvent = require('./event')
/**
* @typedef {object} LlmEmbeddingParams
* @augments LlmEventParams
+ * @property
*/
/**
* @type {LlmEmbeddingParams}
diff --git a/lib/llm-events/aws-bedrock/event.js b/lib/llm-events/aws-bedrock/event.js
index d5434b6546..407b2cab59 100644
--- a/lib/llm-events/aws-bedrock/event.js
+++ b/lib/llm-events/aws-bedrock/event.js
@@ -72,7 +72,7 @@ class LlmEvent {
* @param {object} agent The New Relic agent that provides access to the
* transaction.
*/
- set metadata(agent) {
+ set metadata(agent) { // eslint-disable-line accessor-pairs
const tx = agent.tracer.getTransaction()
const attrs = tx?.trace?.custom.get(DESTINATIONS.TRANS_SCOPE) || {}
for (const [k, v] of Object.entries(attrs)) {
diff --git a/lib/llm-events/aws-bedrock/stream-handler.js b/lib/llm-events/aws-bedrock/stream-handler.js
index 9eaf9b70cc..9543c9fbc9 100644
--- a/lib/llm-events/aws-bedrock/stream-handler.js
+++ b/lib/llm-events/aws-bedrock/stream-handler.js
@@ -191,7 +191,7 @@ class StreamHandler {
}
}
-async function* handleClaude() {
+async function * handleClaude() {
let currentBody = {}
let completion = ''
@@ -210,7 +210,7 @@ async function* handleClaude() {
}
}
-async function* handleClaude3() {
+async function * handleClaude3() {
let currentBody = {}
let stopReason
const completions = []
@@ -235,7 +235,7 @@ async function* handleClaude3() {
}
}
-async function* handleCohere() {
+async function * handleCohere() {
let currentBody = {}
const generations = []
try {
@@ -253,7 +253,7 @@ async function* handleCohere() {
}
}
-async function* handleCohereEmbed() {
+async function * handleCohereEmbed() {
let currentBody = {}
const embeddings = []
try {
@@ -271,7 +271,7 @@ async function* handleCohereEmbed() {
}
}
-async function* handleLlama() {
+async function * handleLlama() {
let currentBody = {}
let generation = ''
@@ -290,7 +290,7 @@ async function* handleLlama() {
}
}
-async function* handleTitan() {
+async function * handleTitan() {
const body = this.response.output.body
body.results = []
diff --git a/lib/llm-events/event.js b/lib/llm-events/event.js
index 444936f8ad..5bb2636eb3 100644
--- a/lib/llm-events/event.js
+++ b/lib/llm-events/event.js
@@ -8,6 +8,7 @@
const { DESTINATIONS } = require('../config/attribute-filter')
class BaseLlmEvent {
+ // eslint-disable-next-line accessor-pairs
set metadata(agent) {
const transaction = agent.tracer.getTransaction()
const attrs = transaction?.trace?.custom.get(DESTINATIONS.TRANS_SCOPE) || {}
diff --git a/lib/llm-events/langchain/chat-completion-summary.js b/lib/llm-events/langchain/chat-completion-summary.js
index 1a7a20d264..020ebbd727 100644
--- a/lib/llm-events/langchain/chat-completion-summary.js
+++ b/lib/llm-events/langchain/chat-completion-summary.js
@@ -25,8 +25,8 @@ const defaultParams = {
}
class LangChainCompletionSummary extends LangChainEvent {
- duration;
- ['response.number_of_messages'] = 0
+ duration
+ 'response.number_of_messages' = 0
constructor(params = defaultParams) {
params = Object.assign({}, defaultParams, params)
diff --git a/lib/llm-events/langchain/event.js b/lib/llm-events/langchain/event.js
index e0e28ca7fd..1beea0eced 100644
--- a/lib/llm-events/langchain/event.js
+++ b/lib/llm-events/langchain/event.js
@@ -73,6 +73,7 @@ class LangChainEvent extends BaseEvent {
}
}
+ // eslint-disable-next-line accessor-pairs
set langchainMeta(value) {
if (isSimpleObject(value) === false) {
return
diff --git a/lib/llm-events/langchain/vector-search.js b/lib/llm-events/langchain/vector-search.js
index 9df95cf3b4..526c8f6be9 100644
--- a/lib/llm-events/langchain/vector-search.js
+++ b/lib/llm-events/langchain/vector-search.js
@@ -22,8 +22,8 @@ const defaultParams = {
}
class LangChainVectorSearch extends LangChainEvent {
- duration;
- ['response.number_of_documents'] = 0
+ duration
+ 'response.number_of_documents' = 0
constructor(params) {
params = Object.assign({}, defaultParams, params)
diff --git a/lib/logger.js b/lib/logger.js
index 80a12c69c0..3fd5259ebc 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -49,11 +49,9 @@ if (config) {
default:
stream = fs.createWriteStream(config.logging.filepath, { flags: 'a+', mode: 0o600 })
stream.on('error', function logStreamOnError(err) {
- /* eslint-disable no-console */
// Since our normal logging didn't work, dump this to stderr.
console.error('New Relic failed to open log file ' + config.logging.filepath)
console.error(err)
- /* eslint-enable no-console */
})
}
logger.pipe(stream)
diff --git a/lib/metrics/index.js b/lib/metrics/index.js
index b44c932aa5..cc06d597eb 100644
--- a/lib/metrics/index.js
+++ b/lib/metrics/index.js
@@ -209,7 +209,6 @@ Metrics.prototype.merge = function merge(other, adjustStartTime) {
// Loop through all scopes and merge them. Since we know `.scoped` has a `null`
// prototype we don't need to worry about own property checks.
- // eslint-disable-next-line guard-for-in
for (const scope in other.scoped) {
_merge(this._resolve(scope), other.scoped[scope])
}
diff --git a/lib/metrics/mapper.js b/lib/metrics/mapper.js
index 788b881d9e..eb4af5f1ae 100644
--- a/lib/metrics/mapper.js
+++ b/lib/metrics/mapper.js
@@ -77,14 +77,14 @@ MetricMapper.prototype.map = function map(name, scope) {
if (this.scoped[scope] && this.scoped[scope][name]) {
return this.scoped[scope][name]
}
- return { name: name, scope: scope }
+ return { name, scope }
}
if (this.unscoped[name]) {
return this.unscoped[name]
}
- return { name: name }
+ return { name }
}
module.exports = MetricMapper
diff --git a/lib/metrics/names.js b/lib/metrics/names.js
index 139da8ab5b..8aedc1c38b 100644
--- a/lib/metrics/names.js
+++ b/lib/metrics/names.js
@@ -332,7 +332,7 @@ const LOGGING = {
PINO: `${SUPPORTABILITY.LOGGING}/${NODEJS.PREFIX}pino/enabled`,
WINSTON: `${SUPPORTABILITY.LOGGING}/${NODEJS.PREFIX}winston/enabled`
},
- DROPPED: `Logging/Forwarding/Dropped`,
+ DROPPED: 'Logging/Forwarding/Dropped',
SEEN: `${LOGGING_FORWARDING_PREFIX}/Seen`,
SENT: `${LOGGING_FORWARDING_PREFIX}/Sent`,
FORWARDING: `${LOGGING_FORWARDING_PREFIX}/${NODEJS.PREFIX}`,
diff --git a/lib/metrics/normalizer.js b/lib/metrics/normalizer.js
index fedae10287..fbb7414483 100644
--- a/lib/metrics/normalizer.js
+++ b/lib/metrics/normalizer.js
@@ -125,10 +125,10 @@ MetricNormalizer.prototype.load = function load(json) {
*/
function processNameRule(rule, ctx) {
if (!rule.pattern) {
- return logger.error({ rule: rule }, 'Simple naming rules require a pattern.')
+ return logger.error({ rule }, 'Simple naming rules require a pattern.')
}
if (!rule.name) {
- return logger.error({ rule: rule }, 'Simple naming rules require a replacement name.')
+ return logger.error({ rule }, 'Simple naming rules require a replacement name.')
}
const precedence = rule.precedence
@@ -242,19 +242,19 @@ MetricNormalizer.prototype.normalize = function normalize(path) {
// that would increase memory overhead by creating additional array
this.emit('appliedRule', rule, normalized, last)
- logger.trace({ rule: rule, type: this.type }, 'Normalized %s to %s.', last, normalized)
+ logger.trace({ rule, type: this.type }, 'Normalized %s to %s.', last, normalized)
last = normalized
}
if (rule.isTerminal) {
- logger.trace({ rule: rule }, 'Terminating normalization.')
+ logger.trace({ rule }, 'Terminating normalization.')
break
}
}
// Return the normalized path.
return {
- matched: matched,
+ matched,
ignore: ignored,
value: this.formatter(normalized, path, this.config)
}
diff --git a/lib/metrics/normalizer/rule.js b/lib/metrics/normalizer/rule.js
index 26820f71d1..68bc206cf1 100644
--- a/lib/metrics/normalizer/rule.js
+++ b/lib/metrics/normalizer/rule.js
@@ -114,7 +114,8 @@ NormalizerRule.prototype.apply = function apply(input) {
// For ignore rules, just see if we match and return either `null` or the
// original input.
if (this.ignore) {
- return (this.matched = this.matches(input)) ? null : input
+ this.matched = this.matches(input)
+ return this.matched ? null : input
}
this.matched = false
diff --git a/lib/metrics/normalizer/tx_segment.js b/lib/metrics/normalizer/tx_segment.js
index 8f8c95b650..74c6e3b5b0 100644
--- a/lib/metrics/normalizer/tx_segment.js
+++ b/lib/metrics/normalizer/tx_segment.js
@@ -45,9 +45,11 @@ TxSegmentNormalizer.prototype.normalize = function normalize(path) {
if (prev === '*') {
return
}
- result.push((prev = '*'))
+ prev = '*'
+ result.push(prev)
} else {
- result.push((prev = part))
+ prev = part
+ result.push(prev)
}
}
diff --git a/lib/prioritized-attributes.js b/lib/prioritized-attributes.js
index 9ff1479ff1..e7d2bdbd48 100644
--- a/lib/prioritized-attributes.js
+++ b/lib/prioritized-attributes.js
@@ -36,6 +36,7 @@ class PrioritizedAttributes {
const attrs = Object.create(null)
for (const [key, attr] of this.attributes) {
+ // eslint-disable-next-line sonarjs/bitwise-operators
if (!(attr.destinations & dest)) {
continue
}
@@ -203,6 +204,6 @@ function makeFilter(scope) {
}
module.exports = {
- PrioritizedAttributes: PrioritizedAttributes,
- ATTRIBUTE_PRIORITY: ATTRIBUTE_PRIORITY
+ PrioritizedAttributes,
+ ATTRIBUTE_PRIORITY
}
diff --git a/lib/priority-queue.js b/lib/priority-queue.js
index 49a9fdda3e..140cce17dc 100644
--- a/lib/priority-queue.js
+++ b/lib/priority-queue.js
@@ -33,7 +33,7 @@ PriorityQueue.prototype.add = function add(value, priority) {
if (this.limit <= 0) {
return false
}
- priority = priority || Math.random()
+ priority = priority || Math.random() // eslint-disable-line sonarjs/pseudo-random
if (this.length === this.limit) {
return this._replace(value, priority)
}
diff --git a/lib/reservoir.js b/lib/reservoir.js
index fc4491e1b5..abcfe74990 100644
--- a/lib/reservoir.js
+++ b/lib/reservoir.js
@@ -29,7 +29,7 @@ Reservoir.prototype.add = function add(item) {
// This is effectively the same as adding the new element to the
// end, swapping the last element (the new one) with a random element in the list,
// then dropping the last element (the potentially swapped one) in the list.
- const toReplace = Math.floor(Math.random() * (this.seen + 2))
+ const toReplace = Math.floor(Math.random() * (this.seen + 2)) // eslint-disable-line sonarjs/pseudo-random
if (toReplace < this.limit) {
this._data[toReplace] = item
}
diff --git a/lib/sampler.js b/lib/sampler.js
index 96c7c66f0f..55d17c7704 100644
--- a/lib/sampler.js
+++ b/lib/sampler.js
@@ -140,11 +140,11 @@ function sampleGc(agent, nativeMetrics) {
module.exports = {
state: 'stopped',
- sampleMemory: sampleMemory,
- checkEvents: checkEvents,
- sampleCpu: sampleCpu,
- sampleGc: sampleGc,
- sampleLoop: sampleLoop,
+ sampleMemory,
+ checkEvents,
+ sampleCpu,
+ sampleGc,
+ sampleLoop,
nativeMetrics: null,
start: function start(agent) {
diff --git a/lib/shim/conglomerate-shim.js b/lib/shim/conglomerate-shim.js
index a29650c482..93c3281a1f 100644
--- a/lib/shim/conglomerate-shim.js
+++ b/lib/shim/conglomerate-shim.js
@@ -39,18 +39,23 @@ class ConglomerateShim extends Shim {
get GENERIC() {
return InstrumentationDescriptor.TYPE_GENERIC
}
+
get DATASTORE() {
return InstrumentationDescriptor.TYPE_DATASTORE
}
+
get MESSAGE() {
return InstrumentationDescriptor.TYPE_MESSAGE
}
+
get PROMISE() {
return InstrumentationDescriptor.TYPE_PROMISE
}
+
get TRANSACTION() {
return InstrumentationDescriptor.TYPE_TRANSACTION
}
+
get WEB_FRAMEWORK() {
return InstrumentationDescriptor.TYPE_WEB_FRAMEWORK
}
diff --git a/lib/shim/datastore-shim.js b/lib/shim/datastore-shim.js
index b883e17390..3691e943af 100644
--- a/lib/shim/datastore-shim.js
+++ b/lib/shim/datastore-shim.js
@@ -378,7 +378,7 @@ function parseQuery(query, nodule) {
let collection = parsed.collection
// strip enclosing special characters from collection (table) name
if (typeof collection === 'string' && collection.length > 2) {
- if (/^[\[{'"`]/.test(collection)) {
+ if (/^[[{'"`]/.test(collection)) {
collection = collection.substring(1)
}
if (/[\]}'"`]$/.test(collection)) {
diff --git a/lib/shim/message-shim/consume.js b/lib/shim/message-shim/consume.js
index 97c011df74..3ee7862995 100644
--- a/lib/shim/message-shim/consume.js
+++ b/lib/shim/message-shim/consume.js
@@ -4,9 +4,10 @@
*/
'use strict'
+
const genericRecorder = require('../../metrics/recorders/generic')
const { _nameMessageSegment } = require('./common')
-const specs = require('../specs')
+
module.exports = createRecorder
/**
diff --git a/lib/shim/message-shim/index.js b/lib/shim/message-shim/index.js
index 6ecd9eba02..cca4f78129 100644
--- a/lib/shim/message-shim/index.js
+++ b/lib/shim/message-shim/index.js
@@ -192,7 +192,7 @@ function setLibrary(library) {
this._transportType = LIBRARY_TRANSPORT_TYPES[library]
}
- this._logger = this._logger.child({ library: library })
+ this._logger = this._logger.child({ library })
this.logger.trace({ metrics: this._metrics }, 'Library metric names set')
}
diff --git a/lib/shim/message-shim/subscribe-consume.js b/lib/shim/message-shim/subscribe-consume.js
index 8a38e805f5..0ce7d70e75 100644
--- a/lib/shim/message-shim/subscribe-consume.js
+++ b/lib/shim/message-shim/subscribe-consume.js
@@ -219,18 +219,16 @@ function createConsumerWrapper({ shim, spec, consumer }) {
// Execute the original function and attempt to hook in the transaction
// finish.
- let ret = null
- try {
- ret = shim.applySegment(consumer, tx.baseSegment, true, this, args)
- } finally {
- if (shim.isPromise(ret)) {
- shim.logger.trace('Got a promise, attaching tx %s ending to promise', tx.id)
- ret = shim.interceptPromise(ret, endTransaction)
- } else if (!tx.handledExternally) {
- // We have no way of knowing when this transaction ended! ABORT!
- shim.logger.trace('Immediately ending message tx %s', tx.id)
- setImmediate(endTransaction)
- }
+ let ret = shim.applySegment(consumer, tx.baseSegment, true, this, args)
+
+ if (shim.isPromise(ret)) {
+ shim.logger.trace('Got a promise, attaching tx %s ending to promise', tx.id)
+
+ ret = shim.interceptPromise(ret, endTransaction)
+ } else if (!tx.handledExternally) {
+ // We have no way of knowing when this transaction ended! ABORT!
+ shim.logger.trace('Immediately ending message tx %s', tx.id)
+ setImmediate(endTransaction)
}
return ret
diff --git a/lib/shim/promise-shim.js b/lib/shim/promise-shim.js
index ef3c864927..02b925bf42 100644
--- a/lib/shim/promise-shim.js
+++ b/lib/shim/promise-shim.js
@@ -106,6 +106,7 @@ class PromiseShim extends Shim {
nodule,
properties,
new ClassWrapSpec({
+ // eslint-disable-next-line sonarjs/no-globals-shadowing
pre: function prePromise(shim, Promise, name, args) {
// We are expecting one function argument for executor, anything else is
// non-standard, do not attempt to wrap. Also do not attempt to wrap if
@@ -115,6 +116,7 @@ class PromiseShim extends Shim {
}
_wrapExecutorContext(shim, args)
},
+ // eslint-disable-next-line sonarjs/no-globals-shadowing
post: function postPromise(shim, Promise, name, args) {
// This extra property is added by `_wrapExecutorContext` in the pre step.
const executor = args[0]
@@ -414,14 +416,11 @@ function wrapHandler({ handler, index, argsLength, useAllParams, ctx, shim }) {
promSegment = segment
}
- let ret = null
- try {
- ret = shim.applySegment(handler, promSegment, true, this, arguments)
- } finally {
- if (ret && typeof ret.then === 'function') {
- ret = ctx.handler[symbols.context].continueContext(ret)
- }
+ let ret = shim.applySegment(handler, promSegment, true, this, arguments)
+ if (ret && typeof ret.then === 'function') {
+ ret = ctx.handler[symbols.context].continueContext(ret)
}
+
return ret
}
}
@@ -582,7 +581,8 @@ class Contextualizer {
}
setSegment(segment) {
- return (this.context.segments[this.idx] = segment)
+ this.context.segments[this.idx] = segment
+ return this.context.segments[this.idx]
}
toJSON() {
diff --git a/lib/shim/shim.js b/lib/shim/shim.js
index 0a393500f1..bb93e2d922 100644
--- a/lib/shim/shim.js
+++ b/lib/shim/shim.js
@@ -570,7 +570,8 @@ function wrapExport(nodule, spec) {
// export.
nodule = nodule.default
}
- return (this._toExport = this.wrap(nodule, null, spec))
+ this._toExport = this.wrap(nodule, null, spec)
+ return this._toExport
}
/**
@@ -605,9 +606,9 @@ function getExport(defaultExport) {
*/
function isWrapped(nodule, property) {
if (property) {
- return !!(nodule?.[property]?.[symbols.wrapped] === this.id)
+ return nodule?.[property]?.[symbols.wrapped] === this.id
}
- return !!(nodule?.[symbols.wrapped] === this.id)
+ return nodule?.[symbols.wrapped] === this.id
}
/**
@@ -750,8 +751,8 @@ function _doRecord({ segment, args, segDesc, shouldCreateSegment, shim, fn, name
// Now bind any callbacks specified in the segment descriptor.
_bindAllCallbacks.call(this, shim, fn, name, args, {
spec: segDesc,
- segment: segment,
- shouldCreateSegment: shouldCreateSegment
+ segment,
+ shouldCreateSegment
})
// Apply the function, and (if it returned a stream) bind that too.
@@ -764,7 +765,7 @@ function _doRecord({ segment, args, segDesc, shouldCreateSegment, shim, fn, name
shim.logger.trace('Binding return value as stream.')
_bindStream(shim, ret, segment, {
event: shim.isString(segDesc.stream) ? segDesc.stream : null,
- shouldCreateSegment: shouldCreateSegment
+ shouldCreateSegment
})
} else if (segDesc.promise && shim.isPromise(ret)) {
shim.logger.trace('Binding return value as Promise.')
@@ -953,7 +954,7 @@ function bindSegment(nodule, property, segment, full) {
// segment is `null`, and thus `true` (the full param) is detected as the
// segment.
if (segment != null && !this.isObject(segment)) {
- this.logger.debug({ segment: segment }, 'Segment is not a segment, not binding.')
+ this.logger.debug({ segment }, 'Segment is not a segment, not binding.')
return nodule
}
@@ -1142,7 +1143,6 @@ function storeSegment(obj, segment) {
}
}
-/* eslint-disable max-params */
/**
* Sets the given segment as the active one for the duration of the function's
* execution.
@@ -1185,7 +1185,6 @@ function applySegment(func, segment, full, context, args, inContextCB) {
return this.tracer.bindFunction(runInContextCb, segment, full).apply(context, args)
}
-/* eslint-enable max-params */
/**
* Creates a new segment.
@@ -1544,7 +1543,8 @@ function proxy(source, properties, dest) {
return source[prop]
},
set: function proxySet(val) {
- return (source[prop] = val)
+ source[prop] = val
+ return source[prop]
}
})
})
@@ -1654,7 +1654,6 @@ function unwrapAll() {
// -------------------------------------------------------------------------- //
-/* eslint-disable no-unused-vars */
/**
* Coerces the given spec into a function which {@link Shim#wrap} can use.
* returns WrapFunction The spec itself if spec is a function, otherwise a
@@ -1666,7 +1665,6 @@ function unwrapAll() {
function _specToFunction(spec) {
throw new Error('Declarative specs are not implemented yet.')
}
-/* eslint-enable no-unused-vars */
/**
* Assigns the shim id and original on the wrapped item.
diff --git a/lib/shim/webframework-shim/index.js b/lib/shim/webframework-shim/index.js
index 91faa49360..54272f6214 100644
--- a/lib/shim/webframework-shim/index.js
+++ b/lib/shim/webframework-shim/index.js
@@ -134,7 +134,7 @@ function setFramework(framework) {
}
this.agent.environment.setFramework(framework)
- this._logger = this._logger.child({ framework: framework })
+ this._logger = this._logger.child({ framework })
this.logger.trace({ metrics: this._metrics }, 'Framework metric names set')
}
diff --git a/lib/shim/webframework-shim/middleware.js b/lib/shim/webframework-shim/middleware.js
index ed7cde37e7..33473e7ef5 100644
--- a/lib/shim/webframework-shim/middleware.js
+++ b/lib/shim/webframework-shim/middleware.js
@@ -111,7 +111,7 @@ function assignTxInfo({ shim, req, route, fnName, isErrorWare }) {
const txInfo = getTransactionInfo(shim, req)
if (!txInfo || !txInfo.transaction) {
shim.logger.debug(
- { txInfo: txInfo },
+ { txInfo },
'Could not get transaction info in %s (%s)',
route,
fnName
diff --git a/lib/shimmer.js b/lib/shimmer.js
index ac479ead09..ac425ea602 100644
--- a/lib/shimmer.js
+++ b/lib/shimmer.js
@@ -157,7 +157,6 @@ const shimmer = (module.exports = {
wrapped[key] = original[key]
})
wrapped[symbols.original] = original
- // eslint-disable-next-line camelcase
wrapped[symbols.unwrap] = function unwrap() {
nodule[method] = original
logger.trace('Removed instrumentation from %s.', fqmn)
@@ -304,7 +303,7 @@ const shimmer = (module.exports = {
} else {
const fileName = path.join(__dirname, 'instrumentation', moduleName + '.js')
shimmer.registerInstrumentation({
- moduleName: moduleName,
+ moduleName,
type: instrInfo.type,
onRequire: _firstPartyInstrumentation.bind(null, agent, fileName)
})
@@ -769,7 +768,7 @@ function hasValidRegisterOptions(opts) {
}
if (!opts.moduleName) {
- logger.warn(`Instrumentation registration failed, 'moduleName' not provided`)
+ logger.warn("Instrumentation registration failed, 'moduleName' not provided")
return false
}
diff --git a/lib/spans/base-span-streamer.js b/lib/spans/base-span-streamer.js
index b8ea567442..3f6146d17c 100644
--- a/lib/spans/base-span-streamer.js
+++ b/lib/spans/base-span-streamer.js
@@ -78,7 +78,7 @@ class BaseSpanStreamer {
* span, a drain event handler is setup to continue writing when possible.
*
* @param {*} data spans or span
- * @param {number} [spanLen=1] number of spans sent in a stream(defaults to 1)
+ * @param {number} [spanLen] number of spans sent in a stream (defaults to 1)
*/
send(data, spanLen = 1) {
// false indicates the stream has reached the highWaterMark
diff --git a/lib/spans/map-to-streaming-type.js b/lib/spans/map-to-streaming-type.js
index 58feff53e4..736eda1459 100644
--- a/lib/spans/map-to-streaming-type.js
+++ b/lib/spans/map-to-streaming-type.js
@@ -32,9 +32,6 @@ function mapToStreamingType(value) {
protoTypeString = isInteger ? INT_TYPE : DOUBLE_TYPE
break
}
- default: {
- protoTypeString = null
- }
}
if (protoTypeString) {
diff --git a/lib/spans/span-event-aggregator.js b/lib/spans/span-event-aggregator.js
index 5f1fd2e601..f1549dd796 100644
--- a/lib/spans/span-event-aggregator.js
+++ b/lib/spans/span-event-aggregator.js
@@ -62,7 +62,7 @@ class SpanEventAggregator extends EventAggregator {
* Attempts to add the given segment to the collection.
*
* @param {TraceSegment} segment - The segment to add.
- * @param {string} [parentId=null] - The GUID of the parent span.
+ * @param {string} [parentId] - The GUID of the parent span.
* @param isRoot
* @returns {boolean} True if the segment was added, or false if it was discarded.
*/
diff --git a/lib/spans/span-streamer.js b/lib/spans/span-streamer.js
index 69c5fa4500..4934333d25 100644
--- a/lib/spans/span-streamer.js
+++ b/lib/spans/span-streamer.js
@@ -9,10 +9,6 @@ const logger = require('../logger').child({ component: 'span-streamer' })
const BaseSpanStreamer = require('./base-span-streamer')
class SpanStreamer extends BaseSpanStreamer {
- constructor(licenseKey, connection, metrics, queueSize) {
- super(licenseKey, connection, metrics, queueSize)
- }
-
addToQueue(span) {
this.spans.push(span)
}
diff --git a/lib/spans/streaming-span-event-aggregator.js b/lib/spans/streaming-span-event-aggregator.js
index 7c05c6f99c..b8ec9f7543 100644
--- a/lib/spans/streaming-span-event-aggregator.js
+++ b/lib/spans/streaming-span-event-aggregator.js
@@ -93,9 +93,7 @@ class StreamingSpanEventAggregator extends Aggregator {
*
* This is here to implement the implicit interface
*/
- _toPayloadSync() {
- return
- }
+ _toPayloadSync() {}
/**
* Attempts to add the given segment to the collection.
diff --git a/lib/system-info.js b/lib/system-info.js
index eb02b9d1ac..ea7de32613 100644
--- a/lib/system-info.js
+++ b/lib/system-info.js
@@ -14,7 +14,6 @@ const logger = require('./logger.js').child({ component: 'system-info' })
const os = require('os')
const parseCpuInfo = require('./parse-proc-cpuinfo')
const parseMemInfo = require('./parse-proc-meminfo')
-const Agent = require('./agent')
const platform = os.platform()
module.exports = fetchSystemInfo
@@ -304,7 +303,7 @@ async function getSysctlValue(names = []) {
async function getProcInfo(procPath) {
try {
return await readProc(procPath)
- } catch (err) {
+ } catch {
// swallow the error if reading fails, logging handled in readProc()
return null
}
diff --git a/lib/timer.js b/lib/timer.js
index 738762722b..516a6eaf17 100644
--- a/lib/timer.js
+++ b/lib/timer.js
@@ -6,7 +6,6 @@
'use strict'
/**
-
* Explicit enumeration of the states a transaction can be in:
*
* PENDING upon instantiation (implicitly, no start time set)
diff --git a/lib/transaction/index.js b/lib/transaction/index.js
index d3c4214b4b..80e9d404c4 100644
--- a/lib/transaction/index.js
+++ b/lib/transaction/index.js
@@ -175,7 +175,7 @@ Transaction.prototype.probe = function probe(action, extra) {
if (this.traceStacks) {
this.traceStacks.push({
stack: new Error(action).stack.split('\n'),
- extra: extra
+ extra
})
}
}
@@ -379,7 +379,7 @@ function _partialNameFromUri(requestUrl, status) {
}
return {
- ignore: ignore,
+ ignore,
value: partialName
}
}
@@ -423,8 +423,8 @@ function finalizeNameFromUri(requestURL, statusCode) {
if (logger.traceEnabled()) {
logger.trace(
{
- requestURL: requestURL,
- statusCode: statusCode,
+ requestURL,
+ statusCode,
transactionId: this.id,
transactionName: this.name
},
@@ -657,6 +657,7 @@ Transaction.prototype.getScrubbedUrl = function getScrubbedUrl() {
if (!this.url) {
return
}
+
this.parsedUrl = url.parse(this.url)
}
@@ -881,7 +882,7 @@ Transaction.prototype.hasErrors = function _hasErrors() {
* @returns {boolean} true if all the errors/exceptions collected so far are expected errors
*/
Transaction.prototype.hasOnlyExpectedErrors = function hasOnlyExpectedErrors() {
- if (0 === this.exceptions.length) {
+ if (this.exceptions.length === 0) {
return false
}
@@ -1224,7 +1225,7 @@ function _acceptDistributedTracePayload(payload, transport) {
const trustedAccount = configTestResult
const trustedAccountKey = data.tk || data.ac
if (trustedAccountKey !== trustedAccount) {
- this.agent.recordSupportability(`DistributedTrace/AcceptPayload/Ignored/UntrustedAccount`)
+ this.agent.recordSupportability('DistributedTrace/AcceptPayload/Ignored/UntrustedAccount')
return
}
@@ -1361,9 +1362,11 @@ Transaction.prototype.isSampled = function isSampled() {
*/
Transaction.prototype._calculatePriority = function _calculatePriority() {
if (this.priority === null) {
+ // eslint-disable-next-line sonarjs/pseudo-random
this.priority = Math.random()
// We want to separate the priority roll from the decision roll to
// avoid biasing the priority range
+ // eslint-disable-next-line sonarjs/pseudo-random
this.sampled = this.agent.transactionSampler.shouldSample(Math.random())
if (this.sampled) {
this.priority += 1
diff --git a/lib/transaction/name-state.js b/lib/transaction/name-state.js
index e1eb4d9ac8..15ca2569d2 100644
--- a/lib/transaction/name-state.js
+++ b/lib/transaction/name-state.js
@@ -49,7 +49,7 @@ NameState.prototype.setName = function setName(prefix, verb, delimiter, path) {
this.setPrefix(prefix)
this.verb = verb && verb.toUpperCase()
this.delimiter = delimiter
- this.pathStack = path ? [{ path: path, params: null }] : []
+ this.pathStack = path ? [{ path, params: null }] : []
this._pathCache = null
this.markedPath = []
logger.trace('setName called on name state, path stack now %j', this.pathStack)
@@ -58,7 +58,7 @@ NameState.prototype.setName = function setName(prefix, verb, delimiter, path) {
NameState.prototype.getStatusName = function getStatusName(statusCode) {
const name = STATUS_CODE_NAMES[statusCode]
if (name) {
- if (LEGACY_NAMING.hasOwnProperty(this.prefix)) {
+ if (Object.prototype.hasOwnProperty.call(LEGACY_NAMING, this.prefix)) {
return _getName(this, name)
}
return NAMES.WEB.FRAMEWORK_PREFIX + '/' + _getName(this, name)
@@ -193,12 +193,13 @@ NameState.prototype.getPath = function getPath() {
}
}
- return (this._pathCache = path)
+ this._pathCache = path
+ return this._pathCache
}
NameState.prototype.getNameNotFound = function getNameNotFound() {
const name = _getName(this, '(not found)')
- if (LEGACY_NAMING.hasOwnProperty(this.prefix)) {
+ if (Object.prototype.hasOwnProperty.call(LEGACY_NAMING, this.prefix)) {
return name
}
return NAMES.WEB.FRAMEWORK_PREFIX + '/' + name
@@ -215,7 +216,7 @@ NameState.prototype.getName = function getName() {
NameState.prototype.getFullName = function getFullName() {
const name = this.getName()
- if (LEGACY_NAMING.hasOwnProperty(this.prefix)) {
+ if (Object.prototype.hasOwnProperty.call(LEGACY_NAMING, this.prefix)) {
return name
}
return NAMES.WEB.FRAMEWORK_PREFIX + '/' + name
diff --git a/lib/transaction/trace/exclusive-time-calculator.js b/lib/transaction/trace/exclusive-time-calculator.js
index fd2fb022ec..dbb5b1d685 100644
--- a/lib/transaction/trace/exclusive-time-calculator.js
+++ b/lib/transaction/trace/exclusive-time-calculator.js
@@ -33,7 +33,7 @@ class ExclusiveCalculator {
// children are all done (i.e. postorder)
this.parentStack.push({
childrenLeft: children.length,
- segment: segment,
+ segment,
childPairs: []
})
for (let i = children.length - 1; i >= 0; --i) {
diff --git a/lib/transaction/trace/segment.js b/lib/transaction/trace/segment.js
index 2fee25fb98..b39afb859b 100644
--- a/lib/transaction/trace/segment.js
+++ b/lib/transaction/trace/segment.js
@@ -450,7 +450,6 @@ TraceSegment.prototype.captureExternalAttributes = function captureExternalAttri
method = 'GET',
queryParams = {}
}) {
- // eslint-disable-next-line guard-for-in
for (const key in queryParams) {
this.addSpanAttribute(`request.parameters.${key}`, queryParams[key])
}
diff --git a/lib/transaction/tracecontext.js b/lib/transaction/tracecontext.js
index 033f7b66c1..51f71bef15 100644
--- a/lib/transaction/tracecontext.js
+++ b/lib/transaction/tracecontext.js
@@ -17,9 +17,9 @@ const W3C_TRACEPARENT_VERSION = '00'
const NR_TRACESTATE_VERSION = 0
// 255 (ff) explicitly not allowed for version
-const VERSION_VALID_RGX = /^((?![f]{2})[a-f0-9]{2})$/
-const TRACEID_VALID_RGX = /^((?![0]{32})[a-f0-9]{32})$/
-const PARENTID_VALID_RGX = /^((?![0]{16})[a-f0-9]{16})$/
+const VERSION_VALID_RGX = /^((?!f{2})[a-f0-9]{2})$/
+const TRACEID_VALID_RGX = /^((?!0{32})[a-f0-9]{32})$/
+const PARENTID_VALID_RGX = /^((?!0{16})[a-f0-9]{16})$/
const FLAGS_VALID_RGX = /^([a-f0-9]{2})$/
const FLAGS = {
diff --git a/lib/util/cat.js b/lib/util/cat.js
index 78cb15d0cc..8c9776ae2c 100644
--- a/lib/util/cat.js
+++ b/lib/util/cat.js
@@ -54,7 +54,7 @@ cat.parseCatData = function parseCatData(id, transactionId, encKey) {
if (transactionId) {
try {
externalTransaction = JSON.parse(hashes.deobfuscateNameUsingKey(transactionId, encKey))
- } catch (e) {
+ } catch {
logger.trace(`Got an unparsable CAT header ${HTTP_CAT_ID_HEADER} %s`, transactionId)
}
}
@@ -199,7 +199,7 @@ cat.extractCatHeaders = function extractCatHeaders(headers) {
let id = null
let transactionId = null
let appData = null
- // eslint-disable-next-line guard-for-in
+
for (const key in headers) {
if (MATCH_CAT_ID_HEADER.test(key)) {
id = headers[key]
@@ -256,7 +256,7 @@ cat.parseAppData = function parseAppData(config, obfAppData) {
let appData = null
try {
appData = JSON.parse(hashes.deobfuscateNameUsingKey(obfAppData, config.encoding_key))
- } catch (e) {
+ } catch {
logger.warn(`Got an unparsable CAT header ${HTTP_CAT_APP_DATA_HEADER}: %s`, obfAppData)
return
}
diff --git a/lib/util/copy.js b/lib/util/copy.js
index 0930138f91..9bc8ecd316 100644
--- a/lib/util/copy.js
+++ b/lib/util/copy.js
@@ -13,7 +13,7 @@ exports.shallow = shallowCopy
* Performs a shallow copy of all properties on the source object.
*
* @param {object} source - The object to copy the properties from.
- * @param {object} [dest={}] - The object to copy the properties to.
+ * @param {object} [dest] - The object to copy the properties to.
* @returns {object} The destination object.
*/
function shallowCopy(source, dest) {
diff --git a/lib/util/deep-equal.js b/lib/util/deep-equal.js
index a363161515..549da53263 100644
--- a/lib/util/deep-equal.js
+++ b/lib/util/deep-equal.js
@@ -15,7 +15,7 @@ module.exports = function exports(a, b) {
* Added this special check because the original implementation of this
* did not consider two NaNs as equal, so preserving existing functionality
*/
- if (a !== a && b !== b) {
+ if (a !== a && b !== b) { // eslint-disable-line no-self-compare
return false
}
diff --git a/lib/util/hashes.js b/lib/util/hashes.js
index 1439e0064a..8404f68405 100644
--- a/lib/util/hashes.js
+++ b/lib/util/hashes.js
@@ -46,6 +46,7 @@ function calculatePathHash(appName, pathName, referingPathHash) {
}
function getHash(appName, txName) {
+ // eslint-disable-next-line sonarjs/hashing
const md5sum = crypto.createHash('md5')
md5sum.update(appName + ';' + txName, 'utf8')
let buf = md5sum.digest()
@@ -60,6 +61,7 @@ const rand = Math.random
const max32 = Math.pow(2, 32) - 1
function randInt32() {
+ // eslint-disable-next-line sonarjs/pseudo-random
return Math.floor(rand() * max32)
}
diff --git a/lib/util/label-parser.js b/lib/util/label-parser.js
index ca59044ecf..9b87c346af 100644
--- a/lib/util/label-parser.js
+++ b/lib/util/label-parser.js
@@ -121,6 +121,7 @@ function truncate(str, max) {
const chr = str.charCodeAt(i)
if (chr >= 0xd800 && chr <= 0xdbff && i !== str.length) {
// Handle UTF-16 surrogate pairs.
+ // eslint-disable-next-line sonarjs/updated-loop-counter
i += 1
}
diff --git a/lib/util/logger.js b/lib/util/logger.js
index f49e9452b5..b3133f3e52 100644
--- a/lib/util/logger.js
+++ b/lib/util/logger.js
@@ -244,8 +244,7 @@ Logger.prototype.write = function write(level, args, extra) {
} else if (typeof args[i] === 'object') {
try {
args[i] = stringify(args[i])
- } catch (err) {
- // eslint-disable-line no-unused-vars
+ } catch {
this.debug('Failed to stringfy object for log')
args[i] = '[UNPARSABLE OBJECT]'
}
@@ -259,8 +258,7 @@ Logger.prototype.write = function write(level, args, extra) {
let data = ''
try {
data = stringify(entry) + '\n'
- } catch (err) {
- // eslint-disable-line no-unused-vars
+ } catch {
this.debug('Unable to stringify log message')
}
diff --git a/lib/util/sql/obfuscate.js b/lib/util/sql/obfuscate.js
index 6688bd15dc..43ba3d12d3 100644
--- a/lib/util/sql/obfuscate.js
+++ b/lib/util/sql/obfuscate.js
@@ -7,16 +7,19 @@
module.exports = obfuscate
+// eslint-disable-next-line sonarjs/slow-regex
const singleQuote = /'(?:''|[^'])*?(?:\\'.*|'(?!'))/
+// eslint-disable-next-line sonarjs/slow-regex
const doubleQuote = /"(?:[^"]|"")*?(?:\\".*|"(?!"))/
const dollarQuote = /(\$(?!\d)[^$]*?\$).*?(?:\1|$)/
-const oracleQuote = /q'\[.*?(?:\]'|$)|q'\{.*?(?:\}'|$)|q'\<.*?(?:\>'|$)|q'\(.*?(?:\)'|$)/
+const oracleQuote = /q'\[.*?(?:\]'|$)|q'\{.*?(?:\}'|$)|q'<.*?(?:>'|$)|q'\(.*?(?:\)'|$)/
+// eslint-disable-next-line sonarjs/slow-regex
const comment = /(?:#|--).*?(?=\r|\n|$)/
-const multilineComment = /\/\*(?:[^\/]|\/[^*])*?(?:\*\/|\/\*.*)/
-const uuid = /\{?(?:[0-9a-f]\-*){32}\}?/
+const multilineComment = /\/\*(?:[^/]|\/[^*])*?(?:\*\/|\/\*.*)/
+const uuid = /\{?(?:[0-9a-f]-*){32}\}?/
const hex = /0x[0-9a-f]+/
const boolean = /\b(?:true|false|null)\b/
-const number = /-?\b(?:[0-9]+\.)?[0-9]+(e[+-]?[0-9]+)?/
+const number = /-?\b(?:\d+\.)?\d+(e[+-]?\d+)?/
const dialects = (obfuscate.dialects = Object.create(null))
diff --git a/lib/util/urltils.js b/lib/util/urltils.js
index 5548b48d79..d19c4d4732 100644
--- a/lib/util/urltils.js
+++ b/lib/util/urltils.js
@@ -9,7 +9,7 @@ const url = require('url')
const logger = require('../logger').child({ component: 'urltils' })
const LOCALHOST_NAMES = {
- 'localhost': true,
+ localhost: true,
'127.0.0.1': true,
'0.0.0.0': true,
'0:0:0:0:0:0:0:1': true,
@@ -28,7 +28,7 @@ module.exports = {
*
* @constant
*/
- LOCALHOST_NAMES: LOCALHOST_NAMES,
+ LOCALHOST_NAMES,
/**
* Checks if the given name is in the dictionary of localhost names.
@@ -180,7 +180,7 @@ module.exports = {
try {
const regexPattern = new RegExp(pattern, flags)
return path.replace(regexPattern, replacement)
- } catch (e) {
+ } catch {
logger.warn('Invalid regular expression for url_obfuscation.regex.pattern', pattern)
return path
}
diff --git a/lib/utilization/aws-info.js b/lib/utilization/aws-info.js
index 7205b62b4e..7f9b9abebb 100644
--- a/lib/utilization/aws-info.js
+++ b/lib/utilization/aws-info.js
@@ -9,6 +9,7 @@ const logger = require('../logger.js').child({ component: 'aws-info' })
const common = require('./common')
const NAMES = require('../metrics/names.js')
let results = null
+// eslint-disable-next-line sonarjs/no-hardcoded-ip
const INSTANCE_HOST = '169.254.169.254'
module.exports = fetchAWSInfo
diff --git a/lib/utilization/azure-info.js b/lib/utilization/azure-info.js
index a3ce0dcae7..8b4529594d 100644
--- a/lib/utilization/azure-info.js
+++ b/lib/utilization/azure-info.js
@@ -24,6 +24,7 @@ function fetchAzureInfo(agent, callback) {
return setImmediate(callback, null, results)
}
+ // eslint-disable-next-line sonarjs/no-hardcoded-ip
const instanceHost = '169.254.169.254'
const apiVersion = '2017-03-01'
const endpoint = '/metadata/instance/compute'
diff --git a/lib/utilization/docker-info.js b/lib/utilization/docker-info.js
index 043f67bf0c..e09894c529 100644
--- a/lib/utilization/docker-info.js
+++ b/lib/utilization/docker-info.js
@@ -111,7 +111,7 @@ function fetchDockerVendorInfo(agent, callback, logger = log) {
}
// try v2 path first and if null try parsing v1 path
- common.readProc(CGROUPS_V2_PATH, function getV2CGroup(err, data) {
+ common.readProc(CGROUPS_V2_PATH, function getV2CGroup(_, data) {
if (data === null) {
logger.debug(
`${CGROUPS_V2_PATH} not found, trying to parse container id from ${CGROUPS_V1_PATH}`
@@ -132,7 +132,7 @@ function fetchDockerVendorInfo(agent, callback, logger = log) {
// For some reason, we have a /proc/self/mountinfo but it does not have
// any Docker information in it (that we have detected). So we will
// fall back to trying the cgroups v1 file.
- logger.debug(`Attempting to fall back to cgroups v1 parsing.`)
+ logger.debug('Attempting to fall back to cgroups v1 parsing.')
findCGroupsV1(callback, logger)
},
logger
@@ -149,7 +149,7 @@ function fetchDockerVendorInfo(agent, callback, logger = log) {
* @param {object} [logger] internal logger instance
*/
function parseCGroupsV2(data, callback, logger = log) {
- const containerLine = new RegExp('/docker/containers/([0-9a-f]{64})/')
+ const containerLine = /\/docker\/containers\/([0-9a-f]{64})\//
const line = containerLine.exec(data)
if (line) {
logger.debug(`Found docker id from cgroups v2: ${line[1]}`)
@@ -168,7 +168,7 @@ function parseCGroupsV2(data, callback, logger = log) {
* @param {object} [logger] internal logger instance
*/
function findCGroupsV1(callback, logger = log) {
- common.readProc(CGROUPS_V1_PATH, function getCGroup(err, data) {
+ common.readProc(CGROUPS_V1_PATH, function getCGroup(_, data) {
if (!data) {
logger.debug(`${CGROUPS_V1_PATH} not found, exiting parsing containerId.`)
return callback(null)
diff --git a/lib/utilization/index.js b/lib/utilization/index.js
index b2971a5b6f..8d1c3d7356 100644
--- a/lib/utilization/index.js
+++ b/lib/utilization/index.js
@@ -23,7 +23,7 @@ function getVendors(agent, callback) {
let done = 0
let vendors = null
VENDOR_NAMES.forEach(function getVendorInfo(vendor) {
- VENDOR_METHODS[vendor](agent, function getInfo(err, result) {
+ VENDOR_METHODS[vendor](agent, function getInfo(_, result) {
logger.trace('Vendor %s finished.', vendor)
if (result) {
vendors = vendors || Object.create(null)
diff --git a/package.json b/package.json
index 7dc3a41387..2958f6c938 100644
--- a/package.json
+++ b/package.json
@@ -164,8 +164,9 @@
"integration": "npm run sub-install && BORP_CONF_FILE=.borp.int.yaml time c8 -o ./coverage/integration borp --timeout 600000 --reporter ./test/lib/test-reporter.mjs",
"integration:esm": "NODE_OPTIONS='--loader=./esm-loader.mjs' BORP_CONF_FILE=.borp.int-esm.yaml time c8 -o ./coverage/integration-esm borp --reporter ./test/lib/test-reporter.mjs",
"prepare-test": "npm run docker-env",
- "lint": "eslint ./*.{js,mjs} lib test bin",
- "lint:fix": "eslint --fix, ./*.{js,mjs} lib test bin",
+ "lint": "eslint --quiet .",
+ "lint:verbose": "eslint .",
+ "lint:fix": "eslint --fix .",
"public-docs": "jsdoc -c ./jsdoc-conf.jsonc",
"publish-docs": "./bin/publish-docs.sh",
"services": "DOCKER_PLATFORM=linux/$(uname -m) docker compose up -d --wait",
@@ -219,7 +220,6 @@
"@aws-sdk/s3-request-presigner": "^3.556.0",
"@koa/router": "^12.0.1",
"@matteo.collina/tspl": "^0.1.1",
- "@newrelic/eslint-config": "^0.3.0",
"@newrelic/newrelic-oss-cli": "^0.1.2",
"@newrelic/test-utilities": "^9.1.0",
"@octokit/rest": "^18.0.15",
@@ -236,10 +236,9 @@
"conventional-changelog-conventionalcommits": "^5.0.0",
"conventional-changelog-writer": "^5.0.1",
"conventional-commits-parser": "^3.2.4",
- "eslint": "^8.24.0",
- "eslint-plugin-disable": "^2.0.1",
- "eslint-plugin-jsdoc": "^48.0.5",
- "eslint-plugin-sonarjs": "^0.18.0",
+ "eslint": "^9.17.0",
+ "eslint-plugin-jsdoc": "^50.6.1",
+ "eslint-plugin-sonarjs": "^3.0.1",
"express": "*",
"git-raw-commits": "^2.0.11",
"glob": "^7.1.2",
@@ -251,6 +250,7 @@
"koa-router": "^12.0.1",
"lint-staged": "^11.0.0",
"lockfile-lint": "^4.9.6",
+ "neostandard": "^0.12.0",
"nock": "11.8.0",
"proxyquire": "^1.8.0",
"rimraf": "^2.6.3",
diff --git a/stub_api.js b/stub_api.js
index 9a0a9c5605..578118af91 100644
--- a/stub_api.js
+++ b/stub_api.js
@@ -9,8 +9,8 @@ const logger = require('./lib/logger.js')
const RealAPI = require('./api.js')
const TransactionHandle = require('./lib/transaction/handle')
-/* eslint-disable no-eval */
function stubFunction(name) {
+ // eslint-disable-next-line sonarjs/code-eval, no-eval
return eval(
'(function () {return function ' +
name +
@@ -21,7 +21,6 @@ function stubFunction(name) {
'}}())'
)
}
-/* eslint-enable no-eval */
function Stub() {}
diff --git a/test/benchmark/datastore-shim/shared.js b/test/benchmark/datastore-shim/shared.js
index 3f057f031a..148b00a724 100644
--- a/test/benchmark/datastore-shim/shared.js
+++ b/test/benchmark/datastore-shim/shared.js
@@ -12,7 +12,7 @@ const { OperationSpec, QuerySpec } = require('../../../lib/shim/specs')
const TestDatastore = require('./test-datastore')
function makeSuite(name) {
- return benchmark.createBenchmark({ name: name, runs: 10000 })
+ return benchmark.createBenchmark({ name, runs: 10000 })
}
function getTestDatastore(agent, instrumented) {
@@ -24,7 +24,7 @@ function getTestDatastore(agent, instrumented) {
return {
collection: 'test',
operation: 'test',
- query: query
+ query
}
})
diff --git a/test/benchmark/events/merge.bench.js b/test/benchmark/events/merge.bench.js
index 0f2d8e891f..aeb363ba91 100644
--- a/test/benchmark/events/merge.bench.js
+++ b/test/benchmark/events/merge.bench.js
@@ -73,7 +73,7 @@ suite.add({
},
fn: function () {
const ev = queue2.getRawEvents()
- const mapped = ev.map((e) => e.value) // eslint-disable-line no-unused-vars
+ ev.map((e) => e.value)
queue1.merge(ev)
}
})
diff --git a/test/benchmark/func-wrap.js b/test/benchmark/func-wrap.js
index e2b4cb3f77..4d20970983 100644
--- a/test/benchmark/func-wrap.js
+++ b/test/benchmark/func-wrap.js
@@ -58,7 +58,7 @@ suite.add({
return function () {
return fn.apply(this, arguments)
}
- }(test.func)) // eslint-disable-line prettier/prettier
+ }(test.func))
return test
}
})
diff --git a/test/benchmark/lib/db/query-parsers/sql.bench.js b/test/benchmark/lib/db/query-parsers/sql.bench.js
index 2f162fdee5..ca36c6afd3 100644
--- a/test/benchmark/lib/db/query-parsers/sql.bench.js
+++ b/test/benchmark/lib/db/query-parsers/sql.bench.js
@@ -46,7 +46,7 @@ for (const test of tests) {
suite.run()
function leadingMultiLineCommentSingleLine() {
- parseSql(`/* insert into bar some stuff */ insert into foo (col1)`)
+ parseSql('/* insert into bar some stuff */ insert into foo (col1)')
}
function leadingMultiLineCommentMultipleLines() {
@@ -56,23 +56,23 @@ function leadingMultiLineCommentMultipleLines() {
}
function singleEmbeddedMultiLineComment() {
- parseSql(`insert /* insert into bar */ into foo`)
+ parseSql('insert /* insert into bar */ into foo')
}
function multipleEmbeddedMultiLineComments() {
- parseSql(`insert /* comments! */ into /* insert into bar some stuff */ foo /* MOAR */ (col1)`)
+ parseSql('insert /* comments! */ into /* insert into bar some stuff */ foo /* MOAR */ (col1)')
}
function selectStatement() {
parseSql(
- `with foobar (col1) as cte select * from foo as a join on cte using (col1) where a.bar = 'baz'`
+ "with foobar (col1) as cte select * from foo as a join on cte using (col1) where a.bar = 'baz'"
)
}
function updateStatement() {
- parseSql(`update foo set bar = 'baz' where col1 = 1`)
+ parseSql("update foo set bar = 'baz' where col1 = 1")
}
function deleteStatement() {
- parseSql(`delete from foo where bar = 'baz'`)
+ parseSql("delete from foo where bar = 'baz'")
}
diff --git a/test/benchmark/promises/shared.js b/test/benchmark/promises/shared.js
index 13aba3b122..b510815cfa 100644
--- a/test/benchmark/promises/shared.js
+++ b/test/benchmark/promises/shared.js
@@ -53,7 +53,7 @@ const tests = [
function longThrowToEnd(Promise) {
return function runTest() {
- let prom = Promise.reject()
+ let prom = Promise.reject(Error('boom'))
for (let i = 0; i < NUM_PROMISES - 1; ++i) {
prom = prom.then(function () {})
}
@@ -65,10 +65,9 @@ const tests = [
return function runTest() {
const promises = []
for (let i = 0; i < NUM_PROMISES; ++i) {
- /* eslint-disable no-new */
promises.push(
- new Promise(function (res) {
- res()
+ new Promise(function (resolve) {
+ resolve()
})
)
}
@@ -81,13 +80,11 @@ const tests = [
const promises = []
for (let i = 0; i < NUM_PROMISES / 2; ++i) {
promises.push(
- new Promise(function (resolve) {
- resolve(
- new Promise(function (res) {
- setImmediate(res)
- })
- )
- })
+ Promise.resolve(
+ new Promise(function (resolve) {
+ setImmediate(resolve)
+ })
+ )
)
}
return Promise.all(promises)
@@ -99,8 +96,8 @@ const tests = [
let prom = Promise.resolve()
for (let i = 0; i < NUM_PROMISES / 2; ++i) {
prom = prom.then(function () {
- return new Promise(function (res) {
- setImmediate(res)
+ return new Promise(function (resolve) {
+ setImmediate(resolve)
})
})
}
diff --git a/test/benchmark/shim/introspection.bench.js b/test/benchmark/shim/introspection.bench.js
index 9ad7b6051e..9e20841607 100644
--- a/test/benchmark/shim/introspection.bench.js
+++ b/test/benchmark/shim/introspection.bench.js
@@ -46,8 +46,8 @@ suite.add({
name: 'shim.isPromise',
fn: function () {
return shim.isPromise(
- new Promise(function (res) {
- res()
+ new Promise(function (resolve) {
+ resolve()
})
)
}
diff --git a/test/benchmark/shim/segments.bench.js b/test/benchmark/shim/segments.bench.js
index 3905e6b216..095f656539 100644
--- a/test/benchmark/shim/segments.bench.js
+++ b/test/benchmark/shim/segments.bench.js
@@ -31,7 +31,7 @@ suite.add({
fn: function () {
const test = shared.getTest()
shim.record(test, 'func', function (shim, fn, name, args) {
- return new RecorderSpec({ name: name, args: args })
+ return new RecorderSpec({ name, args })
})
return test
}
diff --git a/test/benchmark/shim/shared.js b/test/benchmark/shim/shared.js
index 99cfb61db8..1dddf8789d 100644
--- a/test/benchmark/shim/shared.js
+++ b/test/benchmark/shim/shared.js
@@ -12,8 +12,8 @@ const Shim = require('../../../lib/shim/shim')
function makeSuite(name) {
const agent = helper.loadMockedAgent()
const shim = new Shim(agent, 'test-module', './')
- const suite = benchmark.createBenchmark({ name: name, delay: 0.01 })
- return { agent: agent, suite: suite, shim: shim }
+ const suite = benchmark.createBenchmark({ name, delay: 0.01 })
+ return { agent, suite, shim }
}
function getTest() {
diff --git a/test/benchmark/shim/utilities.bench.js b/test/benchmark/shim/utilities.bench.js
index 2c8845c029..c2304ac239 100644
--- a/test/benchmark/shim/utilities.bench.js
+++ b/test/benchmark/shim/utilities.bench.js
@@ -68,8 +68,8 @@ suite.add({
suite.add({
name: 'shim.interceptPromise',
fn: function () {
- const p = new Promise(function (res) {
- res()
+ const p = new Promise(function (resolve) {
+ resolve()
})
return shim.interceptPromise(p, function () {})
}
diff --git a/test/benchmark/shim/wrapReturn.bench.js b/test/benchmark/shim/wrapReturn.bench.js
index 8ca473e54a..31539543ad 100644
--- a/test/benchmark/shim/wrapReturn.bench.js
+++ b/test/benchmark/shim/wrapReturn.bench.js
@@ -15,21 +15,21 @@ const shim = s.shim
let test = null
const testFunctions = {
- 'defineProperty': function testDefProp() {
+ defineProperty: function testDefProp() {
Object.defineProperty(test.func, 'testProp', {
value: 4
})
},
- 'set': function testAssignment() {
+ set: function testAssignment() {
test.func.testProp = 4
},
- 'apply': function testApplication() {
+ apply: function testApplication() {
return test.func()
},
- 'construct': function testConstruction() {
+ construct: function testConstruction() {
return new test.func() //eslint-disable-line
},
- 'get': function testGet() {
+ get: function testGet() {
return test.func.testProp
},
'get unwrap': function testGetUnwrap() {
@@ -44,7 +44,7 @@ Object.keys(testFunctions).forEach((testName) => {
test = shared.getTest()
test.func.testProp = 1
shim.wrapReturn(test, 'func', function (shim, fn, fnName, ret) {
- return { ret: ret }
+ return { ret }
})
return test
},
diff --git a/test/benchmark/shim/wrapped.bench.js b/test/benchmark/shim/wrapped.bench.js
index 7a5eebd635..5b76da697b 100644
--- a/test/benchmark/shim/wrapped.bench.js
+++ b/test/benchmark/shim/wrapped.bench.js
@@ -34,7 +34,7 @@ suite.add({
before: function () {
test = shared.getTest()
shim.wrapReturn(test, 'func', function (shim, fn, fnName, ret) {
- return { ret: ret }
+ return { ret }
})
return test
},
@@ -48,7 +48,7 @@ suite.add({
before: function () {
test = shared.getTest()
shim.wrapClass(test, 'func', function (shim, fn, fnName, args) {
- return { args: args }
+ return { args }
})
return test
},
@@ -62,7 +62,7 @@ suite.add({
before: function () {
test = shared.getTest()
shim.wrapExport(test, function (shim, nodule) {
- return { nodule: nodule }
+ return { nodule }
})
return test
},
diff --git a/test/benchmark/shim/wrapping.bench.js b/test/benchmark/shim/wrapping.bench.js
index 0bb3270006..81e066cef8 100644
--- a/test/benchmark/shim/wrapping.bench.js
+++ b/test/benchmark/shim/wrapping.bench.js
@@ -29,7 +29,7 @@ suite.add({
fn: function () {
const test = shared.getTest()
shim.wrapReturn(test, 'func', function (shim, fn, fnName, ret) {
- return { ret: ret }
+ return { ret }
})
return test
}
@@ -40,7 +40,7 @@ suite.add({
fn: function () {
const test = shared.getTest()
shim.wrapClass(test, 'func', function (shim, fn, fnName, args) {
- return { args: args }
+ return { args }
})
return test
}
@@ -51,7 +51,7 @@ suite.add({
fn: function () {
const test = shared.getTest()
shim.wrapExport(test, function (shim, nodule) {
- return { nodule: nodule }
+ return { nodule }
})
return test
}
diff --git a/test/benchmark/tracer/bindFunction.bench.js b/test/benchmark/tracer/bindFunction.bench.js
index 3feb0b1433..6bad2b1dc3 100644
--- a/test/benchmark/tracer/bindFunction.bench.js
+++ b/test/benchmark/tracer/bindFunction.bench.js
@@ -62,12 +62,14 @@ function allParamBind() {
function twoParamBind() {
const test = shared.getTest()
+ // eslint-disable-next-line no-unused-expressions
Math.random() > 0.5 // rand call so all tests perform same amount of work.
test.func = tracer.bindFunction(test.func, tx.root)
}
function oneParamBind() {
const test = shared.getTest()
+ // eslint-disable-next-line no-unused-expressions
Math.random() > 0.5 // rand call so all tests perform same amount of work.
test.func = tracer.bindFunction(test.func)
}
diff --git a/test/benchmark/tracer/shared.js b/test/benchmark/tracer/shared.js
index 8a0e77f09c..cacca1eee6 100644
--- a/test/benchmark/tracer/shared.js
+++ b/test/benchmark/tracer/shared.js
@@ -10,8 +10,8 @@ const helper = require('../../lib/agent_helper')
function makeSuite(name) {
const agent = helper.loadMockedAgent()
- const suite = benchmark.createBenchmark({ name: name, delay: 0.01 })
- return { agent: agent, suite: suite }
+ const suite = benchmark.createBenchmark({ name, delay: 0.01 })
+ return { agent, suite }
}
function getTest() {
diff --git a/test/benchmark/webframework-shim/recordMiddleware.bench.js b/test/benchmark/webframework-shim/recordMiddleware.bench.js
index 3c72ddf7bf..f0d944dc65 100644
--- a/test/benchmark/webframework-shim/recordMiddleware.bench.js
+++ b/test/benchmark/webframework-shim/recordMiddleware.bench.js
@@ -85,7 +85,7 @@ function getReqd() {
return {
params: { a: 1, b: 2, c: 3 },
[symbols.transactionIinfo]: {
- transaction: transaction,
+ transaction,
segmentStack: [],
errorHandled: false,
error: null
diff --git a/test/helpers/disabled-log/disabled.js b/test/helpers/disabled-log/disabled.js
index 7f4863c4ee..43ec0c7021 100644
--- a/test/helpers/disabled-log/disabled.js
+++ b/test/helpers/disabled-log/disabled.js
@@ -5,9 +5,11 @@
'use strict'
+const path = require('node:path')
+
// Start with a clean slate.
const fs = require('fs')
-const testLogPath = __dirname + '/test.log'
+const testLogPath = path.join(__dirname, 'test.log')
if (fs.existsSync(testLogPath)) {
fs.unlinkSync(testLogPath)
}
diff --git a/test/helpers/disabled-log/newrelic.js b/test/helpers/disabled-log/newrelic.js
index 3eceb58478..3b369ff7e1 100644
--- a/test/helpers/disabled-log/newrelic.js
+++ b/test/helpers/disabled-log/newrelic.js
@@ -5,6 +5,8 @@
'use strict'
+const path = require('node:path')
+
/**
* New Relic agent configuration.
*
@@ -27,7 +29,7 @@ exports.config = {
* production applications.
*/
level: 'trace',
- filepath: __dirname + '/test.log',
+ filepath: path.join(__dirname, 'test.log'),
enabled: false
}
}
diff --git a/test/helpers/exceptions.js b/test/helpers/exceptions.js
index 1fb18fcf48..0508963203 100644
--- a/test/helpers/exceptions.js
+++ b/test/helpers/exceptions.js
@@ -24,7 +24,7 @@ const commands = {
},
domainUncaughtException: function (message) {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
const domain = require('domain')
const d = domain.create()
diff --git a/test/helpers/secrets.js b/test/helpers/secrets.js
index 3b12bc3015..8174dc7436 100644
--- a/test/helpers/secrets.js
+++ b/test/helpers/secrets.js
@@ -7,6 +7,7 @@
/**
* A helper function to get secrets needed by tests
+ * @param secretName
*/
function getTestSecret(secretName) {
const envVar = process.env[secretName] || ''
diff --git a/test/helpers/synthetics.js b/test/helpers/synthetics.js
index 0f59d53910..08eccbdfb1 100644
--- a/test/helpers/synthetics.js
+++ b/test/helpers/synthetics.js
@@ -33,7 +33,7 @@ const SYNTHETICS_INFO = {
initiator: 'cli',
attributes: {
'Attr-Test': 'value',
- 'attr2Test': 'value1',
+ attr2Test: 'value1',
'xTest-Header': 'value2'
}
}
diff --git a/test/helpers/unwritable-log/newrelic.js b/test/helpers/unwritable-log/newrelic.js
index f42f9b32d3..97dfc592b9 100644
--- a/test/helpers/unwritable-log/newrelic.js
+++ b/test/helpers/unwritable-log/newrelic.js
@@ -5,6 +5,8 @@
'use strict'
+const path = require('node:path')
+
/**
* New Relic agent configuration.
*
@@ -27,6 +29,6 @@ exports.config = {
* production applications.
*/
level: 'trace',
- filepath: __dirname + '/test.log'
+ filepath: path.join(__dirname, 'test.log')
}
}
diff --git a/test/helpers/unwritable-log/unwritable.js b/test/helpers/unwritable-log/unwritable.js
index 56671796bc..e11bf53bce 100644
--- a/test/helpers/unwritable-log/unwritable.js
+++ b/test/helpers/unwritable-log/unwritable.js
@@ -6,8 +6,10 @@
'use strict'
// Create a bad log file.
-const fs = require('fs')
-const testLogPath = __dirname + '/test.log'
+const fs = require('node:fs')
+const path = require('node:path')
+
+const testLogPath = path.join(__dirname, 'test.log')
const readOnlyMode = 0x100 // => 0400 => r - -
if (!fs.existsSync(testLogPath)) {
fs.openSync(testLogPath, 'w', readOnlyMode)
diff --git a/test/integration/agent/serverless-harvest.test.js b/test/integration/agent/serverless-harvest.test.js
index 9b025ccfdf..9bf6c104c8 100644
--- a/test/integration/agent/serverless-harvest.test.js
+++ b/test/integration/agent/serverless-harvest.test.js
@@ -152,7 +152,7 @@ test('sending error traces', async (t) => {
const attrs = errData.agentAttributes
plan.deepStrictEqual(
attrs,
- { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
+ { foo: 'bar', 'request.uri': '/nonexistent', spanId },
'should have the correct attributes'
)
}
@@ -297,7 +297,7 @@ test('sending error events', async (t) => {
plan.deepStrictEqual(
agentAttr,
- { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
+ { foo: 'bar', 'request.uri': '/nonexistent', spanId },
'should have the correct attributes'
)
})
diff --git a/test/integration/api/shutdown.test.js b/test/integration/api/shutdown.test.js
index 976253e54a..701c516ed1 100644
--- a/test/integration/api/shutdown.test.js
+++ b/test/integration/api/shutdown.test.js
@@ -46,9 +46,8 @@ test('#shutdown should force harvest and callback after agent restart', (t, end)
helper.unloadAgent(agent)
if (!nock.isDone()) {
- /* eslint-disable no-console */
console.error('Cleaning pending mocks: %j', nock.pendingMocks())
- /* eslint-enable no-console */
+
nock.cleanAll()
assert.fail('Failed to hit all expected endpoints.')
diff --git a/test/integration/collector-remote-method.test.js b/test/integration/collector-remote-method.test.js
index dd71d031f8..1749473435 100644
--- a/test/integration/collector-remote-method.test.js
+++ b/test/integration/collector-remote-method.test.js
@@ -48,7 +48,7 @@ test('DataSender (callback style) talking to fake collector', async (t) => {
const body = JSON.parse(await req.body())
if (Array.isArray(body) === false || body.length) {
- validation.body_errors = [`preconnect expects a body of '[]'`]
+ validation.body_errors = ["preconnect expects a body of '[]'"]
}
const result = {
diff --git a/test/integration/config/config-esm.test.js b/test/integration/config/config-esm.test.js
index 82e5fb0c43..029b7cb193 100644
--- a/test/integration/config/config-esm.test.js
+++ b/test/integration/config/config-esm.test.js
@@ -7,6 +7,7 @@
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
const semver = require('semver')
const match = require('../../lib/custom-assertions/match')
@@ -19,7 +20,7 @@ const exec = util.promisify(require('child_process').exec)
// it cannot require ESM configuration or can
test('should gracefully handle ESM imports', async (t) => {
await t.test('when requiring newrelic.js in ESM app', async () => {
- const { stdout, stderr } = await exec('node index.mjs', { cwd: `${__dirname}/esm-js` })
+ const { stdout, stderr } = await exec('node index.mjs', { cwd: path.join(__dirname, 'esm-js') })
if (semver.gte(process.version, '22.12.0')) {
match(stdout, 'Hello esm-test')
} else {
@@ -28,7 +29,7 @@ test('should gracefully handle ESM imports', async (t) => {
})
await t.test('when requiring newrelic.mjs in ESM app', async () => {
- const { stdout, stderr } = await exec('node index.mjs', { cwd: `${__dirname}/esm-mjs` })
+ const { stdout, stderr } = await exec('node index.mjs', { cwd: path.join(__dirname, 'esm-mjs') })
if (semver.gte(process.version, '22.12.0')) {
match(stdout, 'Hello esm-test')
} else {
@@ -37,7 +38,7 @@ test('should gracefully handle ESM imports', async (t) => {
})
await t.test('when requiring newrelic.cjs in ESM app', async () => {
- const { stdout, stderr } = await exec('node index.mjs', { cwd: `${__dirname}/esm-cjs` })
+ const { stdout, stderr } = await exec('node index.mjs', { cwd: path.join(__dirname, 'esm-cjs') })
assert.deepStrictEqual(stdout, 'Hello good-esm\n', 'should greet in stdout')
assert.deepStrictEqual(stderr, '', 'all should be quiet in stderr')
})
diff --git a/test/integration/config/esm-cjs/index.mjs b/test/integration/config/esm-cjs/index.mjs
index c6701df3cf..72bfef70bf 100644
--- a/test/integration/config/esm-cjs/index.mjs
+++ b/test/integration/config/esm-cjs/index.mjs
@@ -10,7 +10,5 @@ export default function greeter(name) {
}
if (newrelic.agent) {
- /* eslint-disable no-console */
console.log(greeter(newrelic.agent.config.app_name))
- /* eslint-enable no-console */
}
diff --git a/test/integration/config/esm-js/index.mjs b/test/integration/config/esm-js/index.mjs
index c6701df3cf..72bfef70bf 100644
--- a/test/integration/config/esm-js/index.mjs
+++ b/test/integration/config/esm-js/index.mjs
@@ -10,7 +10,5 @@ export default function greeter(name) {
}
if (newrelic.agent) {
- /* eslint-disable no-console */
console.log(greeter(newrelic.agent.config.app_name))
- /* eslint-enable no-console */
}
diff --git a/test/integration/config/esm-js/newrelic.js b/test/integration/config/esm-js/newrelic.js
index 2a6e1dc5f3..aca92cd86b 100644
--- a/test/integration/config/esm-js/newrelic.js
+++ b/test/integration/config/esm-js/newrelic.js
@@ -9,4 +9,3 @@ export const config = {
app_name: ['esm-test'],
license_key: 'nonsensical-balderdash'
}
-
diff --git a/test/integration/config/esm-mjs/index.mjs b/test/integration/config/esm-mjs/index.mjs
index c6701df3cf..72bfef70bf 100644
--- a/test/integration/config/esm-mjs/index.mjs
+++ b/test/integration/config/esm-mjs/index.mjs
@@ -10,7 +10,5 @@ export default function greeter(name) {
}
if (newrelic.agent) {
- /* eslint-disable no-console */
console.log(greeter(newrelic.agent.config.app_name))
- /* eslint-enable no-console */
}
diff --git a/test/integration/config/esm-mjs/newrelic.mjs b/test/integration/config/esm-mjs/newrelic.mjs
index 2a6e1dc5f3..aca92cd86b 100644
--- a/test/integration/config/esm-mjs/newrelic.mjs
+++ b/test/integration/config/esm-mjs/newrelic.mjs
@@ -9,4 +9,3 @@ export const config = {
app_name: ['esm-test'],
license_key: 'nonsensical-balderdash'
}
-
diff --git a/test/integration/core/crypto.test.js b/test/integration/core/crypto.test.js
index e9845463d6..5ebcd35d4d 100644
--- a/test/integration/core/crypto.test.js
+++ b/test/integration/core/crypto.test.js
@@ -56,7 +56,7 @@ test('sync randomBytes', function (t, end) {
test('pseudoRandomBytes', function (t, end) {
const { agent } = t.nr
helper.runInTransaction(agent, function () {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
crypto.pseudoRandomBytes(32, function (err, key) {
assert.ok(!err, 'should not error')
assert.ok(key.length, 32)
@@ -68,7 +68,7 @@ test('pseudoRandomBytes', function (t, end) {
test('sync pseudoRandomBytes', function (t, end) {
const { agent } = t.nr
helper.runInTransaction(agent, function (transaction) {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
const bytes = crypto.pseudoRandomBytes(32)
assert.ok(bytes instanceof Buffer)
assert.equal(bytes.length, 32)
diff --git a/test/integration/core/dns.test.js b/test/integration/core/dns.test.js
index 4739428066..1f0d0ca2a7 100644
--- a/test/integration/core/dns.test.js
+++ b/test/integration/core/dns.test.js
@@ -57,7 +57,7 @@ test('resolve', function (t, end) {
dns.resolve('example.com', function (err, ips) {
assert.ok(!err, 'should not error')
assert.equal(ips.length, 1)
- assert.ok(ips[0].match(/^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/))
+ assert.ok(ips[0].match(/^(?:\d{1,3}\.){3}\d{1,3}$/))
const children = []
verifySegments({ agent, end, name: 'dns.resolve', children })
@@ -71,7 +71,7 @@ test('resolve4', function (t, end) {
dns.resolve4('example.com', function (err, ips) {
assert.ok(!err, 'should not error')
assert.equal(ips.length, 1)
- assert.ok(ips[0].match(/^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/))
+ assert.ok(ips[0].match(/^(?:\d{1,3}\.){3}\d{1,3}$/))
verifySegments({ agent, end, name: 'dns.resolve4' })
})
})
@@ -83,7 +83,7 @@ test('resolve6', function (t, end) {
dns.resolve6('example.com', function (err, ips) {
assert.ok(!err, 'should not error')
assert.equal(ips.length, 1)
- assert.ok(ips[0].match(/^(([0-9a-f]{1,4})(\:|$)){8}/))
+ assert.ok(ips[0].match(/^(([0-9a-f]{1,4})(:|$)){8}/))
verifySegments({ agent, end, name: 'dns.resolve6' })
})
})
diff --git a/test/integration/core/exceptions.test.js b/test/integration/core/exceptions.test.js
index b1af44ec5b..b50a2f1380 100644
--- a/test/integration/core/exceptions.test.js
+++ b/test/integration/core/exceptions.test.js
@@ -180,7 +180,7 @@ test('Report exceptions handled in setUncaughtExceptionCaptureCallback', async (
function startProc(env) {
return cp.fork(path.join(helpersDir, 'exceptions.js'), {
stdio: ['pipe', 'pipe', 'pipe', 'ipc'],
- env: env
+ env
})
}
diff --git a/test/integration/core/exec-me.js b/test/integration/core/exec-me.js
index cdffac8062..a6f8932525 100755
--- a/test/integration/core/exec-me.js
+++ b/test/integration/core/exec-me.js
@@ -6,10 +6,8 @@
'use strict'
-/* eslint-disable no-console */
console.log('I am stdout')
console.error('I am stderr')
-/* eslint-enable no-console */
if (process.send) {
process.send('hello')
diff --git a/test/integration/core/fs.test.js b/test/integration/core/fs.test.js
index aae59dac02..f5a5515369 100644
--- a/test/integration/core/fs.test.js
+++ b/test/integration/core/fs.test.js
@@ -227,7 +227,7 @@ test('chmod', async function (t) {
// Only exists on Darwin currently, using this check to catch if it
// appears in other versions too.
-// eslint-disable-next-line node/no-deprecated-api
+// eslint-disable-next-line n/no-deprecated-api
test('lchmod', { skip: fs.lchmod === undefined }, async function (t) {
const { agent } = t.nr
const plan = tspl(t, { plan: 13 })
@@ -236,7 +236,7 @@ test('lchmod', { skip: fs.lchmod === undefined }, async function (t) {
fs.writeFileSync(name, content)
plan.equal((fs.statSync(name).mode & 0x1ff).toString(8), '666')
helper.runInTransaction(agent, function (trans) {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
fs.lchmod(name, '0777', function (err) {
plan.equal(err, null, 'should not error')
plan.equal((fs.statSync(name).mode & 0x1ff).toString(8), '777')
@@ -820,7 +820,7 @@ test('exists', async function (t) {
fs.writeFileSync(name, content)
helper.runInTransaction(agent, function (trans) {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
fs.exists(name, function (exists) {
plan.ok(exists, 'should exist')
verifySegments({ agent, assert: plan, name: NAMES.FS.PREFIX + 'exists' })
diff --git a/test/integration/core/inspector.test.js b/test/integration/core/inspector.test.js
index 802b5e9b2a..246f47ac91 100644
--- a/test/integration/core/inspector.test.js
+++ b/test/integration/core/inspector.test.js
@@ -7,7 +7,6 @@
const test = require('node:test')
const assert = require('node:assert')
-// eslint-disable-next-line node/no-unsupported-features/node-builtins
const inspector = require('inspector')
const helper = require('../../lib/agent_helper')
diff --git a/test/integration/core/native-promises.test.js b/test/integration/core/native-promises.test.js
index 75a2f5f2bd..a6c1b4985d 100644
--- a/test/integration/core/native-promises.test.js
+++ b/test/integration/core/native-promises.test.js
@@ -132,9 +132,9 @@ test('AsyncLocalStorage based tracking', async (t) => {
await helper.runInTransaction(agent, function () {
let promise = null
assert.doesNotThrow(function () {
- promise = new Promise(function (res) {
- res()
- res()
+ promise = new Promise(function (resolve) {
+ resolve()
+ resolve()
})
})
return promise
@@ -570,8 +570,8 @@ test('AsyncLocalStorage based tracking', async (t) => {
async function (t) {
const plan = tspl(t, { plan: 3 })
const testMetrics = createHook()
- await new Promise(function (res) {
- setTimeout(res, 10)
+ await new Promise(function (resolve) {
+ setTimeout(resolve, 10)
})
setImmediate(checkCallMetrics, plan, testMetrics)
await plan.completed
diff --git a/test/integration/core/util.test.js b/test/integration/core/util.test.js
index af8a9ed117..c8725fc2f0 100644
--- a/test/integration/core/util.test.js
+++ b/test/integration/core/util.test.js
@@ -46,7 +46,7 @@ test('promisify', async function (t) {
})
await t.test('should work on fs.exists', async function () {
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
const asyncExists = util.promisify(require('fs').exists)
const result = await asyncExists(path.join(__dirname, 'exec-me.js'))
diff --git a/test/integration/distributed-tracing/dt.test.js b/test/integration/distributed-tracing/dt.test.js
index 773efc0a0e..b59452a47c 100644
--- a/test/integration/distributed-tracing/dt.test.js
+++ b/test/integration/distributed-tracing/dt.test.js
@@ -278,7 +278,7 @@ test('distributed tracing', async (t) => {
const tx = agent.tracer.getTransaction()
tx.nameState.appendPath('foobar')
- return get(generateUrl(port, endpoint), (err, { body }) => {
+ return get(generateUrl(port, endpoint), (_, { body }) => {
tx.nameState.popPath('foobar')
createResponse(req, res, body, bodyProperty)
})
diff --git a/test/integration/distributed-tracing/span-error-attributes.tap.js b/test/integration/distributed-tracing/span-error-attributes.tap.js
index cf102be9d2..7c2ecec315 100644
--- a/test/integration/distributed-tracing/span-error-attributes.tap.js
+++ b/test/integration/distributed-tracing/span-error-attributes.tap.js
@@ -40,7 +40,7 @@ test('core', async (t) => {
spanEvents.forEach((s) => {
const attrs = s.attributes
- match(attrs['error.message'], /test[0-9]/, { assert: plan })
+ match(attrs['error.message'], /test\d/, { assert: plan })
match(attrs['error.class'], 'Error', { assert: plan })
})
diff --git a/test/integration/distributed-tracing/trace-context-cross-agent.test.js b/test/integration/distributed-tracing/trace-context-cross-agent.test.js
index d94c994ade..43ee2465b3 100644
--- a/test/integration/distributed-tracing/trace-context-cross-agent.test.js
+++ b/test/integration/distributed-tracing/trace-context-cross-agent.test.js
@@ -25,7 +25,10 @@ const camelCaseToSnakeCase = function (object) {
const getDescendantValue = function (object, descendants) {
const arrayDescendants = descendants.split('.')
- while (arrayDescendants.length && (object = object[arrayDescendants.shift()])) {}
+ const noop = () => {}
+ while (arrayDescendants.length && (object = object[arrayDescendants.shift()])) {
+ noop()
+ }
return object
}
@@ -36,7 +39,7 @@ function hasNestedProperty(object, descendants) {
for (let i = 0; i < arrayDescendants.length; i++) {
const property = arrayDescendants[i]
- if (!currentItem || !currentItem.hasOwnProperty(property)) {
+ if (!currentItem || !Object.prototype.hasOwnProperty.call(currentItem, property)) {
return false
}
@@ -150,9 +153,9 @@ function getExactExpectedUnexpectedFromIntrinsics(testCase, eventType) {
const unexpected = (specific.unexpected || []).concat(common.unexpected || [])
return {
- exact: exact,
- expected: expected,
- unexpected: unexpected
+ exact,
+ expected,
+ unexpected
}
}
@@ -209,6 +212,7 @@ function runTestCaseOutboundPayloads(testCase, context) {
break
case 'unexpected':
testUnexpected(context[key], fields)
+ break
case 'notequal':
testNotEqual(context[key], fields)
break
@@ -249,7 +253,7 @@ function runTestCaseTargetEvents(testCase, agent) {
for (const [index] of toCheck.entries()) {
// Span events are not payload-formatted
// straight out of the aggregator.
- const event = 'Span' === eventType ? toCheck[index].toJSON() : toCheck[index]
+ const event = eventType === 'Span' ? toCheck[index].toJSON() : toCheck[index]
testSingleEvent(event, eventType, fixture)
}
}
diff --git a/test/integration/grpc/reconnect.test.js b/test/integration/grpc/reconnect.test.js
index 4f2991cb3b..eb865707c3 100644
--- a/test/integration/grpc/reconnect.test.js
+++ b/test/integration/grpc/reconnect.test.js
@@ -6,6 +6,7 @@
'use strict'
const test = require('node:test')
+const path = require('node:path')
const tspl = require('@matteo.collina/tspl')
const GrpcConnection = require('../../../lib/grpc/connection')
@@ -41,6 +42,7 @@ test('test that connection class reconnects', async (t) => {
*
* While the test functions correctly with a valid connection,
* we ensure proper connection / OK status handling for this case.
+ * @param stream
*/
const recordSpan = (stream) => {
serverConnections++
@@ -70,7 +72,7 @@ test('test that connection class reconnects', async (t) => {
const traceObserverConfig = {
trace_observer: {
host: helper.SSL_HOST,
- port: port
+ port
}
}
@@ -154,7 +156,7 @@ test('Should reconnect even when data sent back', async (t) => {
const traceObserverConfig = {
trace_observer: {
host: helper.SSL_HOST,
- port: port
+ port
}
}
@@ -209,13 +211,13 @@ async function setupSsl() {
function setupServer(t, sslOpts, recordSpan) {
const packageDefinition = protoLoader.loadSync(
- __dirname + '/../../../lib/grpc/endpoints/infinite-tracing/v1.proto',
+ path.join(__dirname, '/../../../lib/grpc/endpoints/infinite-tracing/v1.proto'),
{ keepCase: true, longs: String, enums: String, defaults: true, oneofs: true }
)
const infiniteTracingService = grpc.loadPackageDefinition(packageDefinition).com.newrelic.trace.v1
const server = new grpc.Server()
- server.addService(infiniteTracingService.IngestService.service, { recordSpan: recordSpan })
+ server.addService(infiniteTracingService.IngestService.service, { recordSpan })
const { ca, authPairs } = sslOpts
@@ -240,8 +242,8 @@ function createMetricAggregatorForTests() {
return new MetricAggregator(
{
apdexT: 0.5,
- mapper: mapper,
- normalizer: normalizer
+ mapper,
+ normalizer
},
{},
{ add() {} }
diff --git a/test/integration/index/index-disabled.test.js b/test/integration/index/index-disabled.test.js
index f743784922..8f7c01e595 100644
--- a/test/integration/index/index-disabled.test.js
+++ b/test/integration/index/index-disabled.test.js
@@ -7,9 +7,10 @@
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
test('loading the application via index.js with agent disabled', () => {
- process.env.NEW_RELIC_HOME = __dirname + '/..'
+ process.env.NEW_RELIC_HOME = path.join(__dirname, '/..')
process.env.NEW_RELIC_ENABLED = 'false'
const api = require('../../../index.js')
diff --git a/test/integration/index/index-no-config.test.js b/test/integration/index/index-no-config.test.js
index 44523ebde3..b2e1f7f73e 100644
--- a/test/integration/index/index-no-config.test.js
+++ b/test/integration/index/index-no-config.test.js
@@ -15,14 +15,12 @@ test('loading the application via index.js with no config', (t) => {
return __dirname
}
- /* eslint-disable no-console */
const logs = []
const logError = console.error
t.after(() => {
console.error = logError
})
console.error = (...args) => logs.push(args)
- /* eslint-enable no-console */
let api
assert.doesNotThrow(function () {
diff --git a/test/integration/infinite-tracing-connection.test.js b/test/integration/infinite-tracing-connection.test.js
index 368b12bbf8..6f90ebc003 100644
--- a/test/integration/infinite-tracing-connection.test.js
+++ b/test/integration/infinite-tracing-connection.test.js
@@ -105,9 +105,7 @@ const infiniteTracingService = grpc.loadPackageDefinition(packageDefinition).com
helper.unloadAgent(agent)
if (!nock.isDone()) {
- /* eslint-disable no-console */
console.error('Cleaning pending mocks: %j', nock.pendingMocks())
- /* eslint-enable no-console */
nock.cleanAll()
}
diff --git a/test/integration/instrumentation/fetch.test.js b/test/integration/instrumentation/fetch.test.js
index 4a9536d114..96ff6e8192 100644
--- a/test/integration/instrumentation/fetch.test.js
+++ b/test/integration/instrumentation/fetch.test.js
@@ -62,7 +62,7 @@ test('fetch', async function (t) {
headers: {
'Content-Type': 'application.json'
},
- body: Buffer.from(`{"key":"value"}`)
+ body: Buffer.from('{"key":"value"}')
})
assert.equal(status, 200)
@@ -75,7 +75,7 @@ test('fetch', async function (t) {
headers: {
'Content-Type': 'application.json'
},
- body: Buffer.from(`{"key":"value"}`)
+ body: Buffer.from('{"key":"value"}')
})
assert.equal(status, 200)
@@ -132,14 +132,14 @@ test('fetch', async function (t) {
headers: {
'Content-Type': 'application.json'
},
- body: Buffer.from(`{"key":"value"}`)
+ body: Buffer.from('{"key":"value"}')
})
const req2 = fetch(`${REQUEST_URL}/put`, {
method: 'PUT',
headers: {
'Content-Type': 'application.json'
},
- body: Buffer.from(`{"key":"value"}`)
+ body: Buffer.from('{"key":"value"}')
})
const [{ status }, { status: status2 }] = await Promise.all([req1, req2])
assert.equal(status, 200)
@@ -178,6 +178,7 @@ test('fetch', async function (t) {
}, 100)
await req
} catch (err) {
+ assert.match(err.message, /This operation was aborted/)
assertSegments(tx.trace.root, [`External/${HOST}/delay/1000`], { exact: false })
assert.equal(tx.exceptions.length, 1)
assert.equal(tx.exceptions[0].error.name, 'AbortError')
@@ -204,6 +205,7 @@ test('fetch', async function (t) {
try {
await req
} catch (error) {
+ assert.match(error.message, /fetch failed/)
assertSegments(transaction.trace.root, [`External/localhost:${port}/`], {
exact: false
})
diff --git a/test/integration/instrumentation/http-outbound.test.js b/test/integration/instrumentation/http-outbound.test.js
index 594cdb225a..2dd1e1ddf0 100644
--- a/test/integration/instrumentation/http-outbound.test.js
+++ b/test/integration/instrumentation/http-outbound.test.js
@@ -141,7 +141,7 @@ test('external requests', async function (t) {
const segment = agent.tracer.getTransaction().trace.root.children[0]
assert.equal(
segment.name,
- `External/www.google.com/proxy/path`,
+ 'External/www.google.com/proxy/path',
'should name segment as an external service'
)
end()
diff --git a/test/integration/instrumentation/http-rum.test.js b/test/integration/instrumentation/http-rum.test.js
index ac2ea25c0f..0249829651 100644
--- a/test/integration/instrumentation/http-rum.test.js
+++ b/test/integration/instrumentation/http-rum.test.js
@@ -89,11 +89,11 @@ test('custom naming rules should be applied early for RUM', async function (t) {
external.listen(0, function () {
const port = external.address().port
- http.request({ port: port, path: '/test' }, done).end()
+ http.request({ port, path: '/test' }, done).end()
function done(res) {
res.pipe(
- new StreamSink(function (err, header) {
+ new StreamSink(function (_, header) {
assertBrowserHeader({ agent, header, plan })
})
)
diff --git a/test/integration/instrumentation/http.test.js b/test/integration/instrumentation/http.test.js
index 4ed3845cf1..fd6031e9fc 100644
--- a/test/integration/instrumentation/http.test.js
+++ b/test/integration/instrumentation/http.test.js
@@ -163,7 +163,7 @@ test('built-in http instrumentation should handle internal & external requests',
end()
})
- }.bind(this)
+ }
external.listen(TEST_EXTERNAL_PORT, TEST_HOST, function () {
server.listen(TEST_INTERNAL_PORT, TEST_HOST, function () {
@@ -216,7 +216,7 @@ test('built-in http instrumentation should not swallow errors', async function (
})
// this is gonna blow up
- // eslint-disable-next-line no-use-before-define
+ // eslint-disable-next-line no-use-before-define, sonarjs/no-dead-store
const x = x.dieshere.ohno
}
@@ -421,7 +421,7 @@ test('built-in http instrumentation should not crash when server does not have a
function makeRequest(callback) {
const options = {
hostname: 'localhost',
- port: port,
+ port,
path: '/',
agent: false
}
diff --git a/test/integration/keep-alive.test.js b/test/integration/keep-alive.test.js
index d4bc1f6f86..5c2ba069a1 100644
--- a/test/integration/keep-alive.test.js
+++ b/test/integration/keep-alive.test.js
@@ -14,7 +14,7 @@ const promiseResolvers = require('../lib/promise-resolvers')
const RemoteMethod = require('../../lib/collector/remote-method')
test('RemoteMethod makes two requests with one connection', async (t) => {
- const plan = tspl(t, { plan: 3 })
+ const plan = tspl(t, { plan: 4 })
const { promise, resolve, reject } = promiseResolvers()
const cert = fakeCert()
const serverOpts = { key: cert.privateKey, cert: cert.certificate }
@@ -43,13 +43,14 @@ test('RemoteMethod makes two requests with one connection', async (t) => {
}
})
- await new Promise((done) => {
- server.listen(0, '127.0.0.1', done)
+ await new Promise((resolve) => {
+ server.listen(0, '127.0.0.1', resolve)
})
const port = server.address().port
const method = createRemoteMethod(port, cert)
method.invoke({}, [], (error, res) => {
+ plan.ifError(error)
plan.equal(res.status, 200, 'first request success')
const method2 = createRemoteMethod(port, cert)
@@ -72,7 +73,7 @@ function createRemoteMethod(port, cert) {
const endpoint = {
host: '127.0.0.1',
- port: port
+ port
}
config.certificates = [cert.certificate]
diff --git a/test/integration/module-loading/module-loading.test.js b/test/integration/module-loading/module-loading.test.js
index 68a6cfc8b8..6f14a49c1e 100644
--- a/test/integration/module-loading/module-loading.test.js
+++ b/test/integration/module-loading/module-loading.test.js
@@ -77,13 +77,11 @@ test('should only log supportability metric for tracking type instrumentation',
const PKG = `${FEATURES.INSTRUMENTATION.ON_REQUIRE}/knex`
const PKG_VERSION = `${FEATURES.INSTRUMENTATION.ON_REQUIRE}/knex/Version/1`
- // eslint-disable-next-line node/no-extraneous-require
require('knex')
const knexOnRequiredMetric = agent.metrics._metrics.unscoped[PKG]
assert.equal(knexOnRequiredMetric.callCount, 1, `should record ${PKG}`)
const knexVersionMetric = agent.metrics._metrics.unscoped[PKG_VERSION]
assert.equal(knexVersionMetric.callCount, 1, `should record ${PKG_VERSION}`)
- // eslint-disable-next-line node/no-extraneous-require
const modPath = path.dirname(require.resolve('knex'))
assert.ok(shimmer.isInstrumented('knex', modPath), 'should mark tracking modules as instrumented')
})
@@ -160,7 +158,7 @@ test('Should create usage version metric onRequire', (t, end) => {
test('Should create usage metric onRequire for built-in', (t) => {
const { agent } = t.nr
const domainMetric = `${FEATURES.INSTRUMENTATION.ON_REQUIRE}/domain`
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
require('domain')
const onRequireMetric = agent.metrics._metrics.unscoped[domainMetric]
diff --git a/test/integration/newrelic-harvest-limits.test.js b/test/integration/newrelic-harvest-limits.test.js
index 3cf10262b0..298ff23a47 100644
--- a/test/integration/newrelic-harvest-limits.test.js
+++ b/test/integration/newrelic-harvest-limits.test.js
@@ -54,7 +54,6 @@ test('Connect calls re-generate harvest limits from original config values', (t,
t.after(() => {
helper.unloadAgent(agent)
if (!nock.isDone()) {
- // eslint-disable-next-line no-console
console.error('Cleaning pending mocks: %j', nock.pendingMocks())
nock.cleanAll()
}
diff --git a/test/integration/newrelic-response-handling.test.js b/test/integration/newrelic-response-handling.test.js
index 1a38e82439..2e617d9d81 100644
--- a/test/integration/newrelic-response-handling.test.js
+++ b/test/integration/newrelic-response-handling.test.js
@@ -79,7 +79,6 @@ test('New Relic response code handling', async (t) => {
helper.unloadAgent(agent)
testClock.restore()
if (!nock.isDone()) {
- // eslint-disable-next-line no-console
console.error('Cleaning pending mocks: %j', nock.pendingMocks())
nock.cleanAll()
}
diff --git a/test/integration/security-policies-cross-agent.test.js b/test/integration/security-policies-cross-agent.test.js
index 66acb7d1da..86051975f3 100644
--- a/test/integration/security-policies-cross-agent.test.js
+++ b/test/integration/security-policies-cross-agent.test.js
@@ -76,7 +76,6 @@ test('LASP/CSP - Cross Agent Tests', async (t) => {
t.after(() => {
helper.unloadAgent(agent)
if (!nock.isDone()) {
- // eslint-disable-next-line no-console
console.error('Cleaning pending mocks: %j', nock.pendingMocks())
nock.cleanAll()
}
diff --git a/test/integration/shimmer/testdata/index.js b/test/integration/shimmer/testdata/index.js
index 947219665b..f4f0271889 100644
--- a/test/integration/shimmer/testdata/index.js
+++ b/test/integration/shimmer/testdata/index.js
@@ -5,7 +5,6 @@
'use strict'
-// eslint-disable-next-line node/no-extraneous-require
const Person = require('person')
module.exports = Person
diff --git a/test/integration/uninstrumented/uninstrumented.test.js b/test/integration/uninstrumented/uninstrumented.test.js
index d4edff166c..7557dd0d98 100644
--- a/test/integration/uninstrumented/uninstrumented.test.js
+++ b/test/integration/uninstrumented/uninstrumented.test.js
@@ -8,16 +8,13 @@
const test = require('node:test')
const assert = require('node:assert')
-/* eslint-disable node/no-unpublished-require */
const Metrics = require('../../../lib/metrics')
const MetricNormalizer = require('../../../lib/metrics/normalizer')
const MetricMapper = require('../../../lib/metrics/mapper')
-// eslint-disable-next-line node/no-extraneous-require
const uninstrumented = require('../../../lib/uninstrumented')
const helper = require('../../lib/agent_helper')
const shimmer = require('../../../lib/shimmer')
-/* eslint-enable node/no-unpublished-require */
test('does not mark files with known module names as uninstrumented', (t) => {
const loaded = []
@@ -62,7 +59,7 @@ test('all instrumented modules should be detected when uninstrumented', (t, end)
try {
require(module)
loaded.push(module)
- } catch (err) {
+ } catch {
t.diagnostic('failed to load ' + module)
}
}
diff --git a/test/integration/utilization/system-info.test.js b/test/integration/utilization/system-info.test.js
index 1b5693d3e6..909a7783d6 100644
--- a/test/integration/utilization/system-info.test.js
+++ b/test/integration/utilization/system-info.test.js
@@ -35,7 +35,7 @@ test('pricing system-info aws', function (t, end) {
const ecsScope = nock(awsHost).get('/docker').reply(200, { DockerId: 'ecs-container-1' })
const awsRedirect = nock(awsHost)
awsRedirect.put('/latest/api/token').reply(200, 'awsToken')
- // eslint-disable-next-line guard-for-in
+
for (const awsPath in awsResponses) {
awsRedirect.get(`/latest/${awsPath}`).reply(200, awsResponses[awsPath])
}
@@ -55,6 +55,7 @@ test('pricing system-info aws', function (t, end) {
})
fetchSystemInfo(agent, function fetchSystemInfoCb(err, systemInfo) {
+ assert.ifError(err)
assert.deepEqual(systemInfo.vendors.aws, {
instanceType: 'test.type',
instanceId: 'test.id',
@@ -66,6 +67,7 @@ test('pricing system-info aws', function (t, end) {
assert.ok(awsRedirect.isDone(), 'should exhaust nock endpoints')
assert.ok(ecsScope.isDone())
fetchSystemInfo(agent, function checkCache(err, cachedInfo) {
+ assert.ifError(err)
assert.deepEqual(cachedInfo.vendors.aws, {
instanceType: 'test.type',
instanceId: 'test.id',
@@ -102,6 +104,7 @@ test('pricing system-info azure', function (t, end) {
})
fetchSystemInfo(agent, function fetchSystemInfoCb(err, systemInfo) {
+ assert.ifError(err)
assert.deepEqual(systemInfo.vendors.azure, {
location: 'test.location',
name: 'test.name',
@@ -112,6 +115,7 @@ test('pricing system-info azure', function (t, end) {
// This will throw an error if the sys info isn't being cached properly
assert.ok(azureRedirect.isDone(), 'should exhaust nock endpoints')
fetchSystemInfo(agent, function checkCache(err, cachedInfo) {
+ assert.ifError(err)
assert.deepEqual(cachedInfo.vendors.azure, {
location: 'test.location',
name: 'test.name',
@@ -150,6 +154,7 @@ test('pricing system-info gcp', function (t, end) {
})
fetchSystemInfo(agent, function fetchSystemInfoCb(err, systemInfo) {
+ assert.ifError(err)
const expectedData = {
id: '3161347020215157123',
machineType: 'custom-1-1024',
@@ -161,6 +166,7 @@ test('pricing system-info gcp', function (t, end) {
// This will throw an error if the sys info isn't being cached properly
assert.ok(gcpRedirect.isDone(), 'should exhaust nock endpoints')
fetchSystemInfo(agent, function checkCache(err, cachedInfo) {
+ assert.ifError(err)
assert.deepEqual(cachedInfo.vendors.gcp, expectedData)
end()
})
@@ -186,6 +192,7 @@ test('pricing system-info pcf', function (t, end) {
process.env.MEMORY_LIMIT = '1024m'
fetchSystemInfo(agent, function fetchSystemInfoCb(err, systemInfo) {
+ assert.ifError(err)
const expectedData = {
cf_instance_guid: 'b977d090-83db-4bdb-793a-bb77',
cf_instance_ip: '10.10.147.130',
@@ -223,6 +230,7 @@ test('pricing system-info docker', function (t, end) {
})
fetchSystemInfoProxy(agent, function fetchSystemInfoCb(err, systemInfo) {
+ assert.ifError(err)
const expectedData = {
id: '47cbd16b77c50cbf71401c069cd2189f0e659af17d5a2daca3bddf59d8a870b2'
}
diff --git a/test/integration/utilization/vendor-info-tests.js b/test/integration/utilization/vendor-info-tests.js
index 15043617cd..987a4bb1e7 100644
--- a/test/integration/utilization/vendor-info-tests.js
+++ b/test/integration/utilization/vendor-info-tests.js
@@ -84,6 +84,7 @@ function makeTest(testCase, vendor, getInfo) {
// results.
assert.ok(host.isDone(), 'should have no mocked endpoints')
getInfo(agent, function getCachedInfo(err, cached) {
+ assert.ifError(err)
assert.deepEqual(cached, info, 'should have same data cached')
end()
})
diff --git a/test/lib/agent_helper.js b/test/lib/agent_helper.js
index 996e771f3d..3145d98f8b 100644
--- a/test/lib/agent_helper.js
+++ b/test/lib/agent_helper.js
@@ -73,6 +73,7 @@ helper.getTracer = () => _agent?.tracer
* See agent.js for details, but so far this includes
* passing in a config object and the connection stub
* created in this function.
+ * @param setState
* @returns {Agent} Agent with a stubbed configuration.
*/
helper.loadMockedAgent = function loadMockedAgent(conf, setState = true) {
@@ -121,6 +122,8 @@ helper.getAgentApi = function getAgentApi() {
* @param {String} method The method being invoked on the collector.
* @param number runID Agent run ID (optional).
*
+ * @param runID
+ * @param protocolVersion
* @returns {String} URL path for the collector.
*/
helper.generateCollectorPath = function generateCollectorPath(method, runID, protocolVersion) {
@@ -164,9 +167,10 @@ helper.generateAllPaths = (runId) => {
* but so far this includes passing in a config object and the connection
* stub created in this function.
*
- * @param {boolean} [setState=true]
+ * @param {boolean} [setState]
* Initializes agent's state to 'started', enabling data collection.
*
+ * @param shimmer
* @returns {Agent} Agent with a stubbed configuration.
*/
helper.instrumentMockedAgent = (conf, setState = true, shimmer = require('../../lib/shimmer')) => {
@@ -185,6 +189,7 @@ helper.instrumentMockedAgent = (conf, setState = true, shimmer = require('../../
* Helper to check if security agent should be loaded
*
* @param {Agent} Agent with a stubbed configuration
+ * @param agent
* @returns {boolean}
*/
helper.isSecurityAgentEnabled = function isSecurityAgentEnabled(agent) {
@@ -196,6 +201,7 @@ helper.isSecurityAgentEnabled = function isSecurityAgentEnabled(agent) {
* and requires it and calls start
*
* @param {Agent} Agent with a stubbed configuration
+ * @param agent
*/
helper.maybeLoadSecurityAgent = function maybeLoadSecurityAgent(agent) {
if (helper.isSecurityAgentEnabled(agent)) {
@@ -210,6 +216,7 @@ helper.maybeLoadSecurityAgent = function maybeLoadSecurityAgent(agent) {
* files in its require cache so it can be re-loaded
*
* @param {Agent} Agent with a stubbed configuration
+ * @param agent
*/
helper.maybeUnloadSecurityAgent = function maybeUnloadSecurityAgent(agent) {
if (helper.isSecurityAgentEnabled(agent)) {
@@ -222,6 +229,8 @@ helper.maybeUnloadSecurityAgent = function maybeUnloadSecurityAgent(agent) {
* is shut down.
*
* @param Agent agent The agent to shut down.
+ * @param agent
+ * @param shimmer
*/
helper.unloadAgent = (agent, shimmer = require('../../lib/shimmer')) => {
agent.emit('unload')
@@ -263,7 +272,7 @@ helper.loadTestAgent = (t, conf, setState = true) => {
*
* @param {Agent} agent The agent whose tracer should be used to create the
* transaction.
- * @param {string} [type='web'] Indicates the class of the transaction.
+ * @param {string} [type] Indicates the class of the transaction.
* @param {Function} callback The function to be run within the transaction.
*/
helper.runInTransaction = (agent, type, callback) => {
@@ -292,6 +301,9 @@ helper.runInTransaction = (agent, type, callback) => {
/**
* Proxy for runInTransaction that names the transaction that the
* callback is executed in
+ * @param agent
+ * @param type
+ * @param callback
*/
helper.runInNamedTransaction = (agent, type, callback) => {
if (!callback && typeof type === 'function') {
@@ -315,6 +327,7 @@ helper.runInSegment = (agent, name, callback) => {
* Select Redis DB index and flush entries in it.
*
* @param {redis} [redis]
+ * @param client
* @param {number} dbIndex
* @param {function} callback
* The operations to be performed while the server is running.
@@ -367,14 +380,12 @@ helper.startServerWithRandomPortRetry = (server, maxAttempts = 5) => {
 // server port not guaranteed to be free
if (e.code === 'EADDRINUSE') {
if (attempts >= maxAttempts) {
- // eslint-disable-next-line no-console
console.log('Exceeded max attempts (%s), bailing out.', maxAttempts)
throw new Error('Unable to get unused port')
}
attempts++
- // eslint-disable-next-line no-console
console.log('Address in use, retrying...')
setTimeout(() => {
server.close()
@@ -395,6 +406,7 @@ helper.startServerWithRandomPortRetry = (server, maxAttempts = 5) => {
* request is made after instrumentation is registered
* we want to make sure we get the original library and not
* our instrumented one
+ * @param ca
*/
helper.getRequestLib = function getRequestLib(ca) {
const request = ca ? https.request : http.request
@@ -514,6 +526,7 @@ helper.getMetrics = function getMetrics(agent) {
*
* @param {object} shim shim lib
* @param {Function} original callback
+ * @param cb
*/
helper.checkWrappedCb = function checkWrappedCb(shim, cb) {
 // The wrapped callback is always the last argument
@@ -596,6 +609,7 @@ helper.getShim = function getShim(pkg) {
*/
helper.execSync = function execSync({ cwd, script }) {
try {
+ // eslint-disable-next-line sonarjs/os-command
cp.execSync(`node ./${script}`, {
stdio: 'pipe',
encoding: 'utf8',
diff --git a/test/lib/aws-server-stubs/ai-server/index.js b/test/lib/aws-server-stubs/ai-server/index.js
index 31c65a0dc2..5ab12bdd04 100644
--- a/test/lib/aws-server-stubs/ai-server/index.js
+++ b/test/lib/aws-server-stubs/ai-server/index.js
@@ -85,9 +85,9 @@ function handler(req, res) {
break
}
+ // v1 seems to be the same as v2, just with less helpful responses.
case 'anthropic.claude-v1':
case 'anthropic.claude-instant-v1':
- // v1 seems to be the same as v2, just with less helpful responses.
case 'anthropic.claude-v2':
case 'anthropic.claude-v2:1': {
response = responses.claude.get(payload.prompt)
@@ -113,9 +113,9 @@ function handler(req, res) {
break
}
+ // llama3 responses are identical, just return llama2 data
case 'meta.llama2-13b-chat-v1':
case 'meta.llama2-70b-chat-v1':
- // llama3 responses are indentical, just return llama2 data
case 'meta.llama3-8b-instruct-v1:0':
case 'meta.llama3-70b-instruct-v1:0': {
response = responses.llama.get(payload.prompt)
diff --git a/test/lib/aws-server-stubs/ai-server/responses/amazon.js b/test/lib/aws-server-stubs/ai-server/responses/amazon.js
index 69ce8836e2..754936b0c5 100644
--- a/test/lib/aws-server-stubs/ai-server/responses/amazon.js
+++ b/test/lib/aws-server-stubs/ai-server/responses/amazon.js
@@ -68,11 +68,11 @@ responses.set('text amazon ultimate question streamed', {
},
{
body: {
- 'outputText': '',
- 'index': 0,
- 'totalOutputTextTokenCount': 75,
- 'completionReason': 'endoftext',
- 'inputTextTokenCount': null,
+ outputText: '',
+ index: 0,
+ totalOutputTextTokenCount: 75,
+ completionReason: 'endoftext',
+ inputTextTokenCount: null,
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
diff --git a/test/lib/aws-server-stubs/ai-server/responses/claude.js b/test/lib/aws-server-stubs/ai-server/responses/claude.js
index f54c3f6aba..9571a2f535 100644
--- a/test/lib/aws-server-stubs/ai-server/responses/claude.js
+++ b/test/lib/aws-server-stubs/ai-server/responses/claude.js
@@ -60,9 +60,9 @@ responses.set('text claude ultimate question streamed', {
},
{
body: {
- 'completion': '',
- 'stop_reason': 'endoftext',
- 'stop': '\n\nHuman:',
+ completion: '',
+ stop_reason: 'endoftext',
+ stop: '\n\nHuman:',
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
diff --git a/test/lib/aws-server-stubs/ai-server/responses/claude3.js b/test/lib/aws-server-stubs/ai-server/responses/claude3.js
index 8a317ff412..2e0fbf0505 100644
--- a/test/lib/aws-server-stubs/ai-server/responses/claude3.js
+++ b/test/lib/aws-server-stubs/ai-server/responses/claude3.js
@@ -124,7 +124,7 @@ responses.set('text claude3 ultimate question streamed', {
{
body: {
type: 'message_stop',
- ['amazon-bedrock-invocationMetrics']: {
+ 'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
invocationLatency: 511,
diff --git a/test/lib/aws-server-stubs/ai-server/responses/cohere.js b/test/lib/aws-server-stubs/ai-server/responses/cohere.js
index b19b9ab061..0b1a236a0b 100644
--- a/test/lib/aws-server-stubs/ai-server/responses/cohere.js
+++ b/test/lib/aws-server-stubs/ai-server/responses/cohere.js
@@ -45,15 +45,15 @@ responses.set('text cohere ultimate question streamed', {
':message-type': { type: 'string', value: 'event' }
},
body: {
- 'generations': [
+ generations: [
{
finish_reason: 'endoftext',
id: 'f4ca64e7-93ce-4722-bebe-2d383440dedf',
text: '42'
}
],
- 'id': '1234',
- 'prompt': 'What is the answer to life, the universe, and everything?',
+ id: '1234',
+ prompt: 'What is the answer to life, the universe, and everything?',
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
@@ -98,12 +98,12 @@ responses.set('embed text cohere stream', {
':message-type': { type: 'string', value: 'event' }
},
body: {
- 'embeddings': [
+ embeddings: [
[-0.019012451, 0.031707764, -0.053985596, -0.034484863, 0.019058228, -0.008850098],
[-2.2888184e-4, 0.02166748, -0.009109497, -0.04159546, -0.023513794, -0.007965088]
],
- 'id': 'fbd3923c-3071-4ece-8761-6ba78058f747',
- 'texts': ['foo', 'bar'],
+ id: 'fbd3923c-3071-4ece-8761-6ba78058f747',
+ texts: ['foo', 'bar'],
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 4,
outputTokenCount: 8,
diff --git a/test/lib/aws-server-stubs/ai-server/responses/llama.js b/test/lib/aws-server-stubs/ai-server/responses/llama.js
index cf5792af4d..16775b5336 100644
--- a/test/lib/aws-server-stubs/ai-server/responses/llama.js
+++ b/test/lib/aws-server-stubs/ai-server/responses/llama.js
@@ -53,10 +53,10 @@ responses.set('text llama ultimate question streamed', {
':message-type': { type: 'string', value: 'event' }
},
body: {
- 'generation': '',
- 'prompt_token_count': null,
- 'generation_token_count': 212,
- 'stop_reason': 'endoftext',
+ generation: '',
+ prompt_token_count: null,
+ generation_token_count: 212,
+ stop_reason: 'endoftext',
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
diff --git a/test/lib/aws-server-stubs/empty-response-server/index.js b/test/lib/aws-server-stubs/empty-response-server/index.js
index b14f27bac4..f9ea1eff6e 100644
--- a/test/lib/aws-server-stubs/empty-response-server/index.js
+++ b/test/lib/aws-server-stubs/empty-response-server/index.js
@@ -23,7 +23,6 @@ function createEmptyResponseServer() {
// sometimes the aws-sdk will obfuscate this error
// so logging out.
- // eslint-disable-next-line no-console
console.log('Unhandled request method: ', req.method)
res.statusCode = 500
diff --git a/test/lib/aws-server-stubs/response-server/index.js b/test/lib/aws-server-stubs/response-server/index.js
index 051682a661..184da75c0f 100644
--- a/test/lib/aws-server-stubs/response-server/index.js
+++ b/test/lib/aws-server-stubs/response-server/index.js
@@ -73,7 +73,6 @@ function handlePost(req, res) {
getDataFunction((err, data) => {
if (err) {
res.statusCode = 500
- // eslint-disable-next-line no-console
console.log(err)
}
diff --git a/test/lib/benchmark.js b/test/lib/benchmark.js
index 8a3be41562..d982f63e4a 100644
--- a/test/lib/benchmark.js
+++ b/test/lib/benchmark.js
@@ -33,19 +33,20 @@ class Benchmark {
processSamples() {
const samples = this.samples
- return (this.processedSamples = Object.keys(samples).reduce((acc, sampleName) => {
+ this.processedSamples = Object.keys(samples).reduce((acc, sampleName) => {
try {
acc[sampleName] = new BenchmarkStats(samples[sampleName], this.name, sampleName)
return acc
} catch (e) {
- /* eslint-disable no-console */
console.error(e)
}
- }, {}))
+ return undefined
+ }, {})
+ return this.processedSamples
}
print() {
- console.log(JSON.stringify(this.processSamples(), null, 2)) // eslint-disable-line
+ console.log(JSON.stringify(this.processSamples(), null, 2))
}
async run() {
@@ -134,7 +135,7 @@ class Benchmark {
const samples = []
for (let i = 0; i < test.runs; i++) {
- await runTest(i, test, (err, delta) => {
+ await runTest(i, test, (_, delta) => {
samples.push(delta)
}) // reliant on callback; should refactor test simply to return delta
}
diff --git a/test/lib/broken_instrumentation_module/nr-hooks.js b/test/lib/broken_instrumentation_module/nr-hooks.js
index c23eea157f..7081bc94af 100644
--- a/test/lib/broken_instrumentation_module/nr-hooks.js
+++ b/test/lib/broken_instrumentation_module/nr-hooks.js
@@ -5,4 +5,4 @@
'use strict'
-throw 'boom'
+throw Error('boom')
diff --git a/test/lib/custom-assertions/assert-segments.js b/test/lib/custom-assertions/assert-segments.js
index a56d57257c..0bdaa7aac1 100644
--- a/test/lib/custom-assertions/assert-segments.js
+++ b/test/lib/custom-assertions/assert-segments.js
@@ -77,8 +77,12 @@
* directly under test. Only used when `exact` is true.
* @param {object} [deps] Injected dependencies.
* @param {object} [deps.assert] Assertion library to use.
+ * @param options
+ * @param root0
+ * @param root0.assert
+ * @param options.assert
*/
-module.exports = function assertSegments(
+module.exports = function assertSegments( // eslint-disable-line sonarjs/cognitive-complexity
parent,
expected,
options,
diff --git a/test/lib/custom-assertions/index.js b/test/lib/custom-assertions/index.js
index 910959e195..090e5e3290 100644
--- a/test/lib/custom-assertions/index.js
+++ b/test/lib/custom-assertions/index.js
@@ -19,7 +19,6 @@ for (const entry of entries) {
const fn = require(`./${entry.name}`)
module.exports[fn.name] = fn
} catch (error) {
- /* eslint-disable-next-line */
console.log(`could not load ${entry.name}: ${error.message}`)
throw error
}
diff --git a/test/lib/custom-assertions/match.js b/test/lib/custom-assertions/match.js
index a65008e2a6..a08cb72523 100644
--- a/test/lib/custom-assertions/match.js
+++ b/test/lib/custom-assertions/match.js
@@ -52,6 +52,7 @@ module.exports = function match(actual, expected, { assert = require('node:asser
if (key in actual) {
if (typeof expected[key] === 'function') {
const type = expected[key]
+ // eslint-disable-next-line valid-typeof
assert.ok(typeof actual[key] === TYPE_MAPPINGS[type.name])
} else if (expected[key] instanceof RegExp) {
assert.ok(expected[key].test(actual[key]))
diff --git a/test/lib/custom-assertions/not-has.js b/test/lib/custom-assertions/not-has.js
index 58dff1311b..61c3c6b71e 100644
--- a/test/lib/custom-assertions/not-has.js
+++ b/test/lib/custom-assertions/not-has.js
@@ -12,6 +12,7 @@ const get = require('../../../lib/util/get')
* by `doNotWant`.
*
* @param {object} params Input parameters
+ * @param params.found
* @param {object} found The object to test for absence.
* @param {string} doNotWant Dot separated path to a field that should not
* have a value.
@@ -19,6 +20,9 @@ const get = require('../../../lib/util/get')
* @param {object} [deps] Injected dependencies.
* @param {object} [deps.assert] Assertion library to use.
*
+ * @param params.doNotWant
+ * @param params.msg
+ * @param found.assert
* @throws {Error} When the `found` object contains a value at the specified
* `doNotWant` path.
*/
diff --git a/test/lib/fake-cert.js b/test/lib/fake-cert.js
index cf9e9278c6..ef92eb10cb 100644
--- a/test/lib/fake-cert.js
+++ b/test/lib/fake-cert.js
@@ -24,7 +24,7 @@ const selfCert = require('self-cert')
* subject alternate names.
*
* @param {object} params
- * @param {string|null} [params.commonName=null] The subject name for the
+ * @param {string|null} [params.commonName] The subject name for the
* certificate. This is useful when generating a certificate for remote hosts,
* e.g. when generating a proxy certificate for staging-collector.newrelic.com.
*
@@ -38,7 +38,7 @@ module.exports = function fakeCert({ commonName = null } = {}) {
// certificates.
bits: 1_024,
attrs: {
- commonName: commonName,
+ commonName,
stateName: 'Georgia',
locality: 'Atlanta',
orgName: 'New Relic',
diff --git a/test/lib/logging-helper.js b/test/lib/logging-helper.js
index 4631a7dcb4..70d93084cf 100644
--- a/test/lib/logging-helper.js
+++ b/test/lib/logging-helper.js
@@ -16,9 +16,10 @@ const CONTEXT_KEYS = ['trace.id', 'span.id']
* @param {object} params.log log line
* @param {string} params.message message in log line
* @param {number} params.level log level
+ * @param params.line
*/
function validateLogLine({ line: logLine, message, level }) {
- assert.equal(/[0-9]{10}/.test(logLine.timestamp), true, 'should have proper unix timestamp')
+ assert.equal(/\d{10}/.test(logLine.timestamp), true, 'should have proper unix timestamp')
assert.equal(
logLine.message.includes('NR-LINKING'),
false,
diff --git a/test/lib/nock/aws.js b/test/lib/nock/aws.js
index 100a95d0a9..7edaa561f7 100644
--- a/test/lib/nock/aws.js
+++ b/test/lib/nock/aws.js
@@ -8,6 +8,7 @@
const nock = require('nock')
module.exports.mockAWSInfo = function () {
+ // eslint-disable-next-line sonarjs/no-clear-text-protocols
const awsHost = 'http://169.254.169.254'
const awsResponses = {
'instance-type': 'test.type',
diff --git a/test/lib/promise-resolvers.js b/test/lib/promise-resolvers.js
index 01da435da7..f7fefd096b 100644
--- a/test/lib/promise-resolvers.js
+++ b/test/lib/promise-resolvers.js
@@ -13,16 +13,21 @@
* @returns {{resolve, reject, promise: Promise}}
*/
module.exports = function promiseResolvers() {
+ // We are disabling this lint rule because it complains about
+ // `withResolvers` not being available until Node 22. We know that.
+ // We are doing feature detection.
+ /* eslint-disable n/no-unsupported-features/es-syntax */
if (typeof Promise.withResolvers === 'function') {
// Node.js >=22 natively supports this.
return Promise.withResolvers()
}
+ /* eslint-enable n/no-unsupported-features/es-syntax */
let resolve
let reject
- const promise = new Promise((a, b) => {
- resolve = a
- reject = b
+ const promise = new Promise((_resolve, _reject) => {
+ resolve = _resolve
+ reject = _reject
})
return { promise, resolve, reject }
}
diff --git a/test/lib/promises/common-tests.js b/test/lib/promises/common-tests.js
index ebe7c91794..d32d0aafbd 100644
--- a/test/lib/promises/common-tests.js
+++ b/test/lib/promises/common-tests.js
@@ -9,6 +9,7 @@ const helper = require('../agent_helper')
const COUNT = 2
const { checkTransaction, end, runMultiple } = require('./helpers')
+/* eslint-disable sonarjs/no-globals-shadowing */
module.exports = function init({ t, agent, Promise }) {
return async function performTests(name, resolve, reject) {
const inTx = doPerformTests({ t, agent, Promise, name, resolve, reject, inTx: true })
diff --git a/test/lib/promises/transaction-state.js b/test/lib/promises/transaction-state.js
index b4d13737f4..90978b3cfb 100644
--- a/test/lib/promises/transaction-state.js
+++ b/test/lib/promises/transaction-state.js
@@ -10,9 +10,9 @@ const { tspl } = require('@matteo.collina/tspl')
const { checkTransaction } = require('./helpers')
const initSharedTests = require('./common-tests')
+/* eslint-disable sonarjs/no-globals-shadowing, sonarjs/prefer-promise-shorthand */
module.exports = async function runTests({ t, agent, Promise, library }) {
const performTests = initSharedTests({ t, agent, Promise })
- /* eslint-disable no-shadow, brace-style */
if (library) {
await performTests(
'Library Fullfillment Factories',
@@ -38,13 +38,13 @@ module.exports = async function runTests({ t, agent, Promise, library }) {
await performTests(
'New Synchronous',
function (Promise, val) {
- return new Promise(function (res) {
- res(val)
+ return new Promise(function (resolve) {
+ resolve(val)
})
},
function (Promise, err) {
- return new Promise(function (res, rej) {
- rej(err)
+ return new Promise(function (resolve, reject) {
+ reject(err)
})
}
)
@@ -52,16 +52,16 @@ module.exports = async function runTests({ t, agent, Promise, library }) {
await performTests(
'New Asynchronous',
function (Promise, val) {
- return new Promise(function (res) {
+ return new Promise(function (resolve) {
setTimeout(function () {
- res(val)
+ resolve(val)
}, 10)
})
},
function (Promise, err) {
- return new Promise(function (res, rej) {
+ return new Promise(function (resolve, reject) {
setTimeout(function () {
- rej(err)
+ reject(err)
}, 10)
})
}
diff --git a/test/lib/proxy-server.js b/test/lib/proxy-server.js
index 8c34216307..4811dad147 100644
--- a/test/lib/proxy-server.js
+++ b/test/lib/proxy-server.js
@@ -20,8 +20,9 @@ const connectResponse = [
/**
* An extension of core's `https.Server` with utilities specific to the proxy.
*
- * @extends http.Server
+ * @augments http.Server
* @typedef {object} ProxyServer
+ * @property
*/
/**
@@ -52,8 +53,8 @@ async function createProxyServer({ privateKey, certificate } = {}) {
*/
server.proxyUsed = false
- await new Promise((done) => {
- server.listen(0, '127.0.0.1', done)
+ await new Promise((resolve) => {
+ server.listen(0, '127.0.0.1', resolve)
})
const connections = []
diff --git a/test/lib/temp-override-uncaught.js b/test/lib/temp-override-uncaught.js
index a860f355a1..b8892e3c65 100644
--- a/test/lib/temp-override-uncaught.js
+++ b/test/lib/temp-override-uncaught.js
@@ -21,11 +21,14 @@ const oldListeners = {
* restoring the original listeners upon test completion.
*
* @param {object} params
+ * @param params.t
* @param {TestContext} t A `node:test` context object.
* @param {function} handler An error handler function that will replace all
* current listeners.
- * @param {string} [type='uncaughtException'] The kind of uncaught event to
+ * @param {string} [type] The kind of uncaught event to
* override.
+ * @param params.handler
+ * @param params.type
* @property {string} EXCEPTION Constant value usable for `type`.
* @property {string} REJECTION Constant value usable for `type`.
*/
diff --git a/test/lib/temp-remove-listeners.js b/test/lib/temp-remove-listeners.js
index add84af825..40db56a144 100644
--- a/test/lib/temp-remove-listeners.js
+++ b/test/lib/temp-remove-listeners.js
@@ -10,9 +10,12 @@
* and re-adds them subsequent to a test completing.
*
* @param {object} params
+ * @param params.t
* @param {TestContext} t A `node:test` test context.
* @param {EventEmitter} emitter The emitter to manipulate.
* @param {string} event The event name to target.
+ * @param params.emitter
+ * @param params.event
*/
module.exports = function tempRemoveListeners({ t, emitter, event }) {
if (!emitter) {
diff --git a/test/lib/test-collector-validators.js b/test/lib/test-collector-validators.js
index 94765bf819..2d3881ced8 100644
--- a/test/lib/test-collector-validators.js
+++ b/test/lib/test-collector-validators.js
@@ -27,7 +27,7 @@ class CollectorValidators {
}
// NewRelic-NodeAgent/0.9.1-46 (nodejs 0.8.12 darwin-x64)
- const userAgentPattern = /^NewRelic-[a-zA-Z0-9]+\/[0-9.\-]+ \(.+\)$/
+ const userAgentPattern = /^NewRelic-[a-zA-Z0-9]+\/[0-9.-]+ \(.+\)$/
if (userAgentPattern.test(req.getHeader('User-Agent')) === false) {
errors.push("'User-Agent' should conform to New Relic standards")
}
diff --git a/test/lib/test-collector.js b/test/lib/test-collector.js
index 6c85f5ee11..3550726236 100644
--- a/test/lib/test-collector.js
+++ b/test/lib/test-collector.js
@@ -18,11 +18,13 @@ const CollectorValidators = require('./test-collector-validators')
/**
* Extends {@link http.IncomingMessage} with convenience properties and methods.
* @typedef {object} CollectorIncomingRequest
+ * @property
*/
/**
* Extends {@link http.OutgoingMessage} with convenience properties and methods.
* @typedef {object} CollectorOutgoingResponse
+ * @property
*/
/**
@@ -57,7 +59,7 @@ class Collector {
*
* @param {object} params
* @param {object} params.payload The object to serialize into a response.
- * @param {number} [params.code=200] The status code to use for the
+ * @param {number} [params.code] The status code to use for the
* response.
* @memberof CollectorOutgoingResponse
*/
@@ -83,9 +85,9 @@ class Collector {
req.body = function () {
let resolve
let reject
- const promise = new Promise((res, rej) => {
- resolve = res
- reject = rej
+ const promise = new Promise((_resolve, _reject) => {
+ resolve = _resolve
+ reject = _reject
})
let data = ''
diff --git a/test/lib/test-reporter.mjs b/test/lib/test-reporter.mjs
index 10f3887c3e..4cbf2fc9d1 100644
--- a/test/lib/test-reporter.mjs
+++ b/test/lib/test-reporter.mjs
@@ -95,7 +95,8 @@ class Tracker extends Map {
}
}
-async function* reporter(source) {
+// eslint-disable-next-line sonarjs/cognitive-complexity
+async function * reporter(source) {
const tracker = new Tracker()
for await (const event of source) {
diff --git a/test/smoke/client-s3.test.js b/test/smoke/client-s3.test.js
index 1e0944d6c3..13e5f3b4ce 100644
--- a/test/smoke/client-s3.test.js
+++ b/test/smoke/client-s3.test.js
@@ -13,7 +13,7 @@ const {
test('@aws-sdk/client-s3 functionality', async (t) => {
const { version, name } = require('@aws-sdk/client-s3/package')
- // eslint-disable-next-line no-console
+
console.log(`AWS package: ${name} version: ${version}`)
const agent = helper.instrumentMockedAgent()
const { S3, ...lib } = require('@aws-sdk/client-s3')
diff --git a/test/smoke/e2e/express.test.js b/test/smoke/e2e/express.test.js
index 18c2bb4ca9..b4a0bcc538 100644
--- a/test/smoke/e2e/express.test.js
+++ b/test/smoke/e2e/express.test.js
@@ -26,7 +26,7 @@ test('Express e2e request smoke test', (t, end) => {
server.on('message', function incommingMessage(port) {
const options = {
hostname: 'localhost',
- port: port,
+ port,
path: '/',
method: 'GET'
}
diff --git a/test/smoke/index/index-bad-version.test.js b/test/smoke/index/index-bad-version.test.js
index b1b40a6df5..3d7903166a 100644
--- a/test/smoke/index/index-bad-version.test.js
+++ b/test/smoke/index/index-bad-version.test.js
@@ -4,15 +4,17 @@
*/
'use strict'
+
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
const { getTestSecret } = require('../../helpers/secrets')
const StubApi = require('../../../stub_api')
const license = getTestSecret('TEST_LICENSE')
const VERSIONS = ['garbage', '4.0.0']
test('load agent with bad versions should load stub agent', async (t) => {
- process.env.NEW_RELIC_HOME = __dirname + '/..'
+ process.env.NEW_RELIC_HOME = path.join(__dirname, '..')
process.env.NEW_RELIC_HOST = 'staging-collector.newrelic.com'
process.env.NEW_RELIC_LICENSE_KEY = license
diff --git a/test/smoke/index/index.test.js b/test/smoke/index/index.test.js
index 9f1b8f360b..41cb3d2b4c 100644
--- a/test/smoke/index/index.test.js
+++ b/test/smoke/index/index.test.js
@@ -6,13 +6,14 @@
'use strict'
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
const { getTestSecret } = require('../../helpers/secrets')
const license = getTestSecret('TEST_LICENSE')
test('loading the application via index.js', { timeout: 15000 }, (t, end) => {
let agent = null
- process.env.NEW_RELIC_HOME = __dirname + '/..'
+ process.env.NEW_RELIC_HOME = path.join(__dirname, '..')
process.env.NEW_RELIC_HOST = 'staging-collector.newrelic.com'
process.env.NEW_RELIC_LICENSE_KEY = license
diff --git a/test/smoke/s3-presigned-url.test.js b/test/smoke/s3-presigned-url.test.js
index d0c5372081..8633c4a556 100644
--- a/test/smoke/s3-presigned-url.test.js
+++ b/test/smoke/s3-presigned-url.test.js
@@ -11,7 +11,7 @@ const https = require('https')
test('@aws-sdk/s3-request-presigner functionality', (t, end) => {
const { version, name } = require('@aws-sdk/s3-request-presigner/package')
- // eslint-disable-next-line no-console
+
console.log(`AWS package: ${name} version: ${version}`)
const agent = helper.instrumentMockedAgent()
const { S3, ...lib } = require('@aws-sdk/client-s3')
diff --git a/test/unit/agent/agent.test.js b/test/unit/agent/agent.test.js
index 8a43178785..85f802f120 100644
--- a/test/unit/agent/agent.test.js
+++ b/test/unit/agent/agent.test.js
@@ -591,7 +591,7 @@ test('when connected', async (t) => {
})
})
- function setupAggregators({ enableAggregator: enableAggregator = true, agent, collector }) {
+ function setupAggregators({ enableAggregator = true, agent, collector }) {
agent.config.application_logging.enabled = enableAggregator
agent.config.application_logging.forwarding.enabled = enableAggregator
agent.config.slow_sql.enabled = enableAggregator
diff --git a/test/unit/aggregators/log-aggregator.test.js b/test/unit/aggregators/log-aggregator.test.js
index db2c77fe18..9065c4dcd2 100644
--- a/test/unit/aggregators/log-aggregator.test.js
+++ b/test/unit/aggregators/log-aggregator.test.js
@@ -20,7 +20,7 @@ test('Log Aggregator', async (t) => {
ctx.nr.txReturn = undefined
ctx.nr.commonAttrs = {
'entity.guid': 'MTkwfEFQTXxBUFBMSUNBVElPTnwyMjUzMDY0Nw',
- 'hostname': 'test-host',
+ hostname: 'test-host',
'entity.name': 'unit-test',
'entity.type': 'SERVICE'
}
@@ -62,12 +62,12 @@ test('Log Aggregator', async (t) => {
ctx.nr.logEventAggregator = new LogAggregator({ runId: RUN_ID, limit: LIMIT }, ctx.nr.agent)
ctx.nr.log = {
- 'level': 30,
- 'timestamp': '1649689872369',
- 'pid': 4856,
+ level: 30,
+ timestamp: '1649689872369',
+ pid: 4856,
'trace.id': '2f93639c684a2dd33c28345173d218b8',
'span.id': 'a136d77f2a5b997b',
- 'message': 'unit test msg'
+ message: 'unit test msg'
}
})
@@ -116,7 +116,7 @@ test('Log Aggregator', async (t) => {
return JSON.parse(log2)
}
function formatLog2() {
- return
+
}
logEventAggregator.add(log)
logEventAggregator.add(formatLog)
@@ -135,9 +135,7 @@ test('Log Aggregator', async (t) => {
await t.test('toPayload() should return nothing when log functions return no data', (t) => {
const { logEventAggregator } = t.nr
- function formatLog() {
- return
- }
+ function formatLog() {}
logEventAggregator.add(formatLog)
const payload = logEventAggregator._toPayloadSync()
assert.equal(payload, undefined)
diff --git a/test/unit/analytics_events.test.js b/test/unit/analytics_events.test.js
index 3be4c6f95d..bb6d1d23b4 100644
--- a/test/unit/analytics_events.test.js
+++ b/test/unit/analytics_events.test.js
@@ -244,7 +244,7 @@ test('on transaction finished', async (t) => {
initiator: 'cli',
attributes: {
'Attr-Test': 'value',
- 'attr2Test': 'value1',
+ attr2Test: 'value1',
'xTest-Header': 'value2'
}
}
diff --git a/test/unit/api/api-instrument-loaded-module.test.js b/test/unit/api/api-instrument-loaded-module.test.js
index 7b4f82db56..7dc0497be4 100644
--- a/test/unit/api/api-instrument-loaded-module.test.js
+++ b/test/unit/api/api-instrument-loaded-module.test.js
@@ -97,9 +97,8 @@ test('Agent API - instrumentLoadedModule', async (t) => {
const EMPTY_MODULE = {}
let mod = EMPTY_MODULE
try {
- // eslint-disable-next-line node/no-missing-require
mod = require('mysql')
- } catch (e) {}
+ } catch {}
assert.ok(mod === EMPTY_MODULE, 'mysql is not installed')
// attempt to instrument -- if nothing throws we're good
diff --git a/test/unit/api/api-instrument-messages.test.js b/test/unit/api/api-instrument-messages.test.js
index 299120b66e..1d2afb699f 100644
--- a/test/unit/api/api-instrument-messages.test.js
+++ b/test/unit/api/api-instrument-messages.test.js
@@ -4,8 +4,10 @@
*/
'use strict'
+
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
const API = require('../../../api')
const helper = require('../../lib/agent_helper')
const sinon = require('sinon')
@@ -30,7 +32,7 @@ test('Agent API - instrumentMessages', async (t) => {
const { api } = t.nr
const opts = {
moduleName: 'foobar',
- absolutePath: `${__dirname}/foobar`,
+ absolutePath: path.join(__dirname, 'foobar'),
onRequire: function () {}
}
api.instrumentMessages(opts)
diff --git a/test/unit/api/api-llm.test.js b/test/unit/api/api-llm.test.js
index 96d5fc6afe..9d7437e22e 100644
--- a/test/unit/api/api-llm.test.js
+++ b/test/unit/api/api-llm.test.js
@@ -104,7 +104,7 @@ test('Agent API LLM methods', async (t) => {
assert.equal(result, undefined)
assert.equal(loggerMock.warn.callCount, 0)
assert.equal(event.name, 'LlmFeedbackMessage')
- assert.match(event.data.id, /[\w\d]{32}/)
+ assert.match(event.data.id, /\w{32}/)
// remove from object as it was just asserted via regex
delete event.data.id
assert.deepEqual(event.data, {
@@ -170,12 +170,12 @@ test('Agent API LLM methods', async (t) => {
}
api.withLlmCustomAttributes(
{
- 'toRename': 'value1',
+ toRename: 'value1',
'llm.number': 1,
'llm.boolean': true,
- 'toDelete': () => {},
- 'toDelete2': {},
- 'toDelete3': []
+ toDelete: () => {},
+ toDelete2: {},
+ toDelete3: []
},
() => {
const contextManager = tx._llmContextManager
@@ -205,20 +205,20 @@ test('Agent API LLM methods', async (t) => {
() => {
const contextManager = tx._llmContextManager
const context = contextManager.getStore()
- assert.equal(context[`llm.step`], '1')
+ assert.equal(context['llm.step'], '1')
assert.equal(context['llm.path'], 'root')
assert.equal(context['llm.name'], 'root')
api.withLlmCustomAttributes({ 'llm.step': '1.1', 'llm.path': 'root/1' }, () => {
const contextManager2 = tx._llmContextManager
const context2 = contextManager2.getStore()
- assert.equal(context2[`llm.step`], '1.1')
+ assert.equal(context2['llm.step'], '1.1')
assert.equal(context2['llm.path'], 'root/1')
assert.equal(context2['llm.name'], 'root')
})
api.withLlmCustomAttributes({ 'llm.step': '1.2', 'llm.path': 'root/2' }, () => {
const contextManager3 = tx._llmContextManager
const context3 = contextManager3.getStore()
- assert.equal(context3[`llm.step`], '1.2')
+ assert.equal(context3['llm.step'], '1.2')
assert.equal(context3['llm.path'], 'root/2')
assert.equal(context3['llm.name'], 'root')
end()
diff --git a/test/unit/api/api-obfuscate-sql.test.js b/test/unit/api/api-obfuscate-sql.test.js
index dca517abc4..5565904593 100644
--- a/test/unit/api/api-obfuscate-sql.test.js
+++ b/test/unit/api/api-obfuscate-sql.test.js
@@ -17,7 +17,7 @@ test('Agent API - obfuscateSql', (t, end) => {
helper.unloadAgent(agent)
})
- const sql = `select * from foo where a='b' and c=100;`
+ const sql = "select * from foo where a='b' and c=100;"
const obfuscated = api.obfuscateSql(sql, 'postgres')
assert.equal(obfuscated, 'select * from foo where a=? and c=?;')
end()
diff --git a/test/unit/api/stub.test.js b/test/unit/api/stub.test.js
index 6419188477..e59a137c25 100644
--- a/test/unit/api/stub.test.js
+++ b/test/unit/api/stub.test.js
@@ -23,6 +23,7 @@ test('Agent API - Stubbed Agent API', async (t) => {
/**
* This tests that every API method is a function and
* does not throw when calling it
+ * @param name
*/
async function testApiStubMethod(name) {
await t.test(`should export a stub of API#${name}`, (t, end) => {
diff --git a/test/unit/collector/api-connect.test.js b/test/unit/collector/api-connect.test.js
index 0a15333e14..7930b52e99 100644
--- a/test/unit/collector/api-connect.test.js
+++ b/test/unit/collector/api-connect.test.js
@@ -137,6 +137,7 @@ test('succeeds when given a different port number for redirect', async (t) => {
await t.test('should have a run ID', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(res.payload.agent_run_id, RUN_ID)
end()
})
@@ -145,6 +146,7 @@ test('succeeds when given a different port number for redirect', async (t) => {
await t.test('should pass through server-side configuration untouched', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.deepStrictEqual(res.payload, { agent_run_id: RUN_ID })
end()
})
@@ -198,6 +200,7 @@ for (const retryCount of retryCounts) {
await t.test('should not error out', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error) => {
+ assert.ifError(error)
assert.equal(error, undefined)
end()
})
@@ -206,6 +209,7 @@ for (const retryCount of retryCounts) {
await t.test('should have a run ID', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(res.payload.agent_run_id, RUN_ID)
end()
})
@@ -214,6 +218,7 @@ for (const retryCount of retryCounts) {
await t.test('should pass through server-side configuration untouched', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.deepStrictEqual(res.payload, { agent_run_id: RUN_ID })
end()
})
@@ -264,6 +269,7 @@ test('disconnects on force disconnect (410)', async (t) => {
await t.test('should not have a response body', (t, end) => {
const { collector, collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(res.payload, undefined)
assert.equal(collector.isDone('preconnect'), true)
end()
@@ -271,7 +277,7 @@ test('disconnects on force disconnect (410)', async (t) => {
})
})
-test(`retries preconnect until forced to disconnect (410)`, async (t) => {
+test('retries preconnect until forced to disconnect (410)', async (t) => {
const retryCount = 500
const exception = {
exception: {
@@ -319,6 +325,7 @@ test(`retries preconnect until forced to disconnect (410)`, async (t) => {
await t.test('should have received shutdown response', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
const shutdownCommand = CollectorResponse.AGENT_RUN_BEHAVIOR.SHUTDOWN
assert.deepStrictEqual(res.agentRun, shutdownCommand)
end()
@@ -326,7 +333,7 @@ test(`retries preconnect until forced to disconnect (410)`, async (t) => {
})
})
-test(`retries on receiving invalid license key (401)`, async (t) => {
+test('retries on receiving invalid license key (401)', async (t) => {
const retryCount = 5
t.beforeEach(async (ctx) => {
@@ -376,6 +383,7 @@ test(`retries on receiving invalid license key (401)`, async (t) => {
await t.test('should call the expected number of times', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(t.nr.retries, 5)
assert.equal(res.payload.agent_run_id, 31338)
end()
@@ -383,7 +391,7 @@ test(`retries on receiving invalid license key (401)`, async (t) => {
})
})
-test(`retries on misconfigured proxy`, async (t) => {
+test('retries on misconfigured proxy', async (t) => {
// We are using `nock` for these tests because it provides its own socket
// implementation that is able to fake a bad connection to a server.
// Basically, these tests are attempting to verify conditions around
@@ -452,16 +460,18 @@ test(`retries on misconfigured proxy`, async (t) => {
await t.test('should log warning when proxy is misconfigured', (t, end) => {
const { collectorApi } = t.nr
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(t.nr.failure.isDone(), true)
assert.equal(t.nr.success.isDone(), true)
assert.equal(t.nr.connect.isDone(), true)
assert.equal(res.payload.agent_run_id, 31338)
- const expectErrorMsg =
- 'Your proxy server appears to be configured to accept connections \
-over http. When setting `proxy_host` and `proxy_port` New Relic attempts to connect over \
-SSL(https). If your proxy is configured to accept connections over http, try setting `proxy` \
-to a fully qualified URL(e.g http://proxy-host:8080).'
+ const expectErrorMsg = [
+ 'Your proxy server appears to be configured to accept connections ',
+ 'over http. When setting `proxy_host` and `proxy_port` New Relic attempts to connect over ',
+ 'SSL(https). If your proxy is configured to accept connections over http, try setting `proxy` ',
+ 'to a fully qualified URL(e.g http://proxy-host:8080).'
+ ].join('')
assert.deepStrictEqual(
t.nr.logs,
[[expectedError, expectErrorMsg]],
@@ -478,6 +488,7 @@ to a fully qualified URL(e.g http://proxy-host:8080).'
const { collectorApi } = t.nr
collectorApi._agent.config.proxy = 'http://test-proxy-server:8080'
collectorApi.connect((error, res) => {
+ assert.ifError(error)
assert.equal(t.nr.failure.isDone(), true)
assert.equal(t.nr.success.isDone(), true)
assert.equal(t.nr.connect.isDone(), true)
diff --git a/test/unit/collector/api-login.test.js b/test/unit/collector/api-login.test.js
index b94ff3b410..fc5170d231 100644
--- a/test/unit/collector/api-login.test.js
+++ b/test/unit/collector/api-login.test.js
@@ -446,6 +446,7 @@ test('receiving no config back from connect', async (t) => {
await t.test('should pass along no server-side configuration from collector', (t, end) => {
const { collectorApi } = t.nr
collectorApi._login((error, res) => {
+ assert.equal(error.message, 'No agent run ID received from handshake.')
assert.equal(res.payload, undefined)
end()
})
diff --git a/test/unit/collector/api.test.js b/test/unit/collector/api.test.js
index 8dd5fbba54..56a497ee0c 100644
--- a/test/unit/collector/api.test.js
+++ b/test/unit/collector/api.test.js
@@ -45,6 +45,7 @@ test('reportSettings', async (t) => {
await t.test('should return the expected `empty` response', (t, end) => {
const { collectorApi } = t.nr
collectorApi.reportSettings((error, res) => {
+ assert.ifError(error)
assert.deepStrictEqual(res.payload, [])
end()
})
@@ -186,19 +187,19 @@ const apiMethods = [
[
{
'error.expected': false,
- 'traceId': '2714fa36883e18f6',
+ traceId: '2714fa36883e18f6',
'error.class': 'I am an error',
- 'type': 'TransactionError',
- 'transactionName': 'OtherTransaction/Custom/Simple/sqlTransaction',
- 'priority': 1.205386,
- 'duration': 0.001,
+ type: 'TransactionError',
+ transactionName: 'OtherTransaction/Custom/Simple/sqlTransaction',
+ priority: 1.205386,
+ duration: 0.001,
'nr.transactionGuid': '2714fa36883e18f6',
- 'port': 8080,
+ port: 8080,
'error.message': 'I am an error',
- 'guid': '2714fa36883e18f6',
+ guid: '2714fa36883e18f6',
'nr.tripId': '2714fa36883e18f6',
- 'sampled': true,
- 'timestamp': '1543864407859'
+ sampled: true,
+ timestamp: '1543864407859'
},
{
test: 'metric'
@@ -262,7 +263,7 @@ const apiMethods = [
0,
'OtherTransaction/Custom/Simple/sqlTransaction',
'Custom/Simple/sqlTransaction',
- `[1543864412869,{},{},[0,1,'ROOT',{'async_context':'main','exclusive_duration_millis':0.886261},[[0,1,'Java/Simple/sqlTransaction',{'async_context':'main','exclusive_duration_millis':0.886261},[],'Simple','sqlTransaction']],'Simple','sqlTransaction'],{'userAttributes':{'test':'metric'},'intrinsics':{'traceId':'731f4eebda5f292c','guid':'731f4eebda5f292c','priority':1.825609,'sampled':true,'totalTime':8.86261E-4},'agentAttributes':{'request.uri':'Custom/Simple/sqlTransaction','jvm.thread_name':'main'}}]`,
+ "[1543864412869,{},{},[0,1,'ROOT',{'async_context':'main','exclusive_duration_millis':0.886261},[[0,1,'Java/Simple/sqlTransaction',{'async_context':'main','exclusive_duration_millis':0.886261},[],'Simple','sqlTransaction']],'Simple','sqlTransaction'],{'userAttributes':{'test':'metric'},'intrinsics':{'traceId':'731f4eebda5f292c','guid':'731f4eebda5f292c','priority':1.825609,'sampled':true,'totalTime':8.86261E-4},'agentAttributes':{'request.uri':'Custom/Simple/sqlTransaction','jvm.thread_name':'main'}}]",
'731f4eebda5f292c',
null,
false
@@ -274,17 +275,17 @@ const apiMethods = [
data: [
[
{
- 'traceId': 'd959974e17abe2b5',
- 'duration': 0.011713522,
- 'name': 'Nodejs/Test/span',
- 'guid': 'b5ca3c76520b680a',
- 'type': 'Span',
- 'category': 'generic',
- 'priority': 1.9650071,
- 'sampled': true,
- 'transactionId': 'd959974e17abe2b5',
+ traceId: 'd959974e17abe2b5',
+ duration: 0.011713522,
+ name: 'Nodejs/Test/span',
+ guid: 'b5ca3c76520b680a',
+ type: 'Span',
+ category: 'generic',
+ priority: 1.9650071,
+ sampled: true,
+ transactionId: 'd959974e17abe2b5',
'nr.entryPoint': true,
- 'timestamp': 1543864402820
+ timestamp: 1543864402820
},
{},
{}
@@ -301,15 +302,15 @@ const apiMethods = [
{
logs: [
{
- 'timestamp': '1649353816647',
+ timestamp: '1649353816647',
'log.level': 'INFO',
- 'message': 'Unit testing',
+ message: 'Unit testing',
'span.id': '1122334455',
'trace.id': 'aabbccddee'
}
],
common: {
- attributes: { 'entity.guid': 'guid', 'entity.name': 'test app', 'hostname': 'test-host' }
+ attributes: { 'entity.guid': 'guid', 'entity.name': 'test app', hostname: 'test-host' }
}
}
]
diff --git a/test/unit/collector/facts.test.js b/test/unit/collector/facts.test.js
index a5f3c41678..d93289d04c 100644
--- a/test/unit/collector/facts.test.js
+++ b/test/unit/collector/facts.test.js
@@ -632,7 +632,7 @@ test('display_host facts', async (t) => {
ctx.nr.osNetworkInterfaces = os.networkInterfaces
ctx.nr.osHostname = os.hostname
os.hostname = () => {
- throw 'BROKEN'
+ throw Error('BROKEN')
}
})
diff --git a/test/unit/collector/parse-response.test.js b/test/unit/collector/parse-response.test.js
index a4ca94f602..4db92448cf 100644
--- a/test/unit/collector/parse-response.test.js
+++ b/test/unit/collector/parse-response.test.js
@@ -35,6 +35,7 @@ test('when initialized properly and response status is 200', async (t) => {
await t.test('should pass through return value', (t, end) => {
const parser = parse(methodName, response, (error, res) => {
+ assert.ifError(error)
assert.deepStrictEqual(res.payload, [1, 1, 2, 3, 5, 8])
end()
})
@@ -43,6 +44,7 @@ test('when initialized properly and response status is 200', async (t) => {
await t.test('should pass through status code', (t, end) => {
const parser = parse(methodName, response, (error, res) => {
+ assert.ifError(error)
assert.deepStrictEqual(res.status, 200)
end()
})
@@ -51,6 +53,7 @@ test('when initialized properly and response status is 200', async (t) => {
await t.test('should pass through even a null return value', (t, end) => {
const parser = parse(methodName, response, (error, res) => {
+ assert.ifError(error)
assert.equal(res.payload, null)
end()
})
diff --git a/test/unit/config/config-env.test.js b/test/unit/config/config-env.test.js
index 650f13aa01..cc792b8fc5 100644
--- a/test/unit/config/config-env.test.js
+++ b/test/unit/config/config-env.test.js
@@ -277,7 +277,6 @@ test('when overriding configuration values via environment variables', async (t)
err: 'error'
}
- // eslint-disable-next-line guard-for-in
for (const key in logAliases) {
idempotentEnv({ NEW_RELIC_LOG_LEVEL: key }, (tc) => {
assert.equal(tc.logging.level, logAliases[key])
@@ -874,7 +873,7 @@ test('when overriding configuration values via environment variables', async (t)
await t.test('should convert NEW_RELIC_INSTRUMENTATION* accordingly', (t, end) => {
const env = {
NEW_RELIC_INSTRUMENTATION_IOREDIS_ENABLED: 'false',
- ['NEW_RELIC_INSTRUMENTATION_@GRPC/GRPC-JS_ENABLED']: 'false',
+ 'NEW_RELIC_INSTRUMENTATION_@GRPC/GRPC-JS_ENABLED': 'false',
NEW_RELIC_INSTRUMENTATION_KNEX_ENABLED: 'false'
}
idempotentEnv(env, (config) => {
diff --git a/test/unit/config/config-formatters.test.js b/test/unit/config/config-formatters.test.js
index 1f37f77b5c..b29f06c4e5 100644
--- a/test/unit/config/config-formatters.test.js
+++ b/test/unit/config/config-formatters.test.js
@@ -142,7 +142,7 @@ test('config formatters', async () => {
assert.equal(formatters.regex(val, loggerMock), null)
assert.equal(
loggerMock.error.args[0][0],
- `New Relic configurator could not validate regex: [a-z`
+ 'New Relic configurator could not validate regex: [a-z'
)
assert.match(loggerMock.error.args[1][0], /SyntaxError: Invalid regular expression/)
})
diff --git a/test/unit/config/config-location.test.js b/test/unit/config/config-location.test.js
index 39341effa5..10818b8ae8 100644
--- a/test/unit/config/config-location.test.js
+++ b/test/unit/config/config-location.test.js
@@ -207,6 +207,6 @@ test('Selecting config file path', async (t) => {
function createInvalidConfig(dir, filename) {
CONFIG_PATH = path.join(dir, filename)
- fs.writeFileSync(CONFIG_PATH, `exports.config = null.pleaseThrow`)
+ fs.writeFileSync(CONFIG_PATH, 'exports.config = null.pleaseThrow')
}
})
diff --git a/test/unit/config/config.test.js b/test/unit/config/config.test.js
index f2d5a1a7c4..a34c59facf 100644
--- a/test/unit/config/config.test.js
+++ b/test/unit/config/config.test.js
@@ -161,8 +161,8 @@ test('loggingLabels', async (t) => {
await t.test('should exclude labels regardless of case', () => {
const config = {
labels: {
- 'label1': 'value1',
- 'LABEL2': 'value2',
+ label1: 'value1',
+ LABEL2: 'value2',
'LABEL2-ALSO': 'value3'
},
application_logging: {
@@ -189,8 +189,8 @@ test('loggingLabels', async (t) => {
() => {
const config = {
labels: {
- 'label1': 'value1',
- 'LABEL2': 'value2',
+ label1: 'value1',
+ LABEL2: 'value2',
'LABEL2-ALSO': 'value3'
},
application_logging: {
diff --git a/test/unit/db/query-trace-aggregator.test.js b/test/unit/db/query-trace-aggregator.test.js
index e23ef76423..cf932ce0bb 100644
--- a/test/unit/db/query-trace-aggregator.test.js
+++ b/test/unit/db/query-trace-aggregator.test.js
@@ -31,7 +31,7 @@ test('Query Trace Aggregator', async (t) => {
let cbCalledWithNull = false
- const cb = (err, data) => {
+ const cb = (_, data) => {
if (data === null) {
cbCalledWithNull = true
}
@@ -309,6 +309,7 @@ test('Query Trace Aggregator', async (t) => {
addQuery(queries, 600, '/abc')
queries.prepareJSON(function preparedJSON(err, data) {
+ assert.ifError(err)
const sample = data[0]
codec.decode(sample[9], function decoded(error, params) {
@@ -334,6 +335,7 @@ test('Query Trace Aggregator', async (t) => {
addQuery(queries, 600, '/abc')
queries.prepareJSON(function preparedJSON(err, data) {
+ assert.ifError(err)
const sample = data[0]
const params = sample[9]
const keys = Object.keys(params)
diff --git a/test/unit/db/trace.test.js b/test/unit/db/trace.test.js
index 361fa978e4..558dcc5a33 100644
--- a/test/unit/db/trace.test.js
+++ b/test/unit/db/trace.test.js
@@ -41,6 +41,7 @@ test('SQL trace attributes', async (t) => {
tx._acceptDistributedTracePayload(payload)
agent.queries.add(tx.trace.root, 'postgres', 'select pg_sleep(1)', 'FAKE STACK')
agent.queries.prepareJSON((err, samples) => {
+ assert.ifError(err)
const sample = samples[0]
const attributes = sample[sample.length - 1]
assert.equal(attributes.traceId, tx.traceId)
@@ -87,6 +88,7 @@ test('SQL trace attributes', async (t) => {
helper.runInTransaction(agent, function (tx) {
agent.queries.add(tx.trace.root, 'postgres', 'select pg_sleep(1)', 'FAKE STACK')
agent.queries.prepareJSON((err, samples) => {
+ assert.ifError(err)
const sample = samples[0]
const attributes = sample[sample.length - 1]
assert.equal(attributes.traceId, tx.traceId)
diff --git a/test/unit/distributed_tracing/dt-cats.test.js b/test/unit/distributed_tracing/dt-cats.test.js
index 45ed17cc5f..c16570fbba 100644
--- a/test/unit/distributed_tracing/dt-cats.test.js
+++ b/test/unit/distributed_tracing/dt-cats.test.js
@@ -80,13 +80,13 @@ test('distributed tracing', async function (t) {
if (outbound.expected) {
outbound.expected.forEach((key) => {
- assert.ok(created.d.hasOwnProperty(keyRegex.exec(key)[1]))
+ assert.ok(Object.prototype.hasOwnProperty.call(created.d, keyRegex.exec(key)[1]))
})
}
if (outbound.unexpected) {
outbound.unexpected.forEach((key) => {
- assert.ok(!created.d.hasOwnProperty(keyRegex.exec(key)[1]))
+ assert.ok(!Object.prototype.hasOwnProperty.call(created.d, keyRegex.exec(key)[1]))
})
}
})
diff --git a/test/unit/distributed_tracing/tracecontext.test.js b/test/unit/distributed_tracing/tracecontext.test.js
index 3b4c4f9dfa..93098b0937 100644
--- a/test/unit/distributed_tracing/tracecontext.test.js
+++ b/test/unit/distributed_tracing/tracecontext.test.js
@@ -45,7 +45,7 @@ test('TraceContext', async function (t) {
await t.test('should accept valid trace context headers', (ctx) => {
const { traceContext } = ctx.nr
const traceparent = '00-00015f9f95352ad550284c27c5d3084c-00f067aa0ba902b7-00'
- // eslint-disable-next-line max-len
+
const tracestate = `33@nr=0-0-33-2827902-7d3efb1b173fecfa-e8b91a159289ff74-1-1.23456-${Date.now()}`
const tcd = traceContext.acceptTraceContextPayload(traceparent, tracestate)
@@ -258,7 +258,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '190'
const goodTraceStateHeader =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827,234234@foo=bar'
const valid = traceContext._validateAndParseTraceStateHeader(goodTraceStateHeader)
assert.ok(valid)
@@ -279,7 +279,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '190'
const goodTraceStateHeader =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827,234234@foo=bar'
const bufferTraceState = Buffer.from(goodTraceStateHeader, 'utf8')
const valid = traceContext._validateAndParseTraceStateHeader(bufferTraceState)
@@ -301,7 +301,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '666'
const badTraceStateHeader =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827,234234@foo=bar'
const valid = traceContext._validateAndParseTraceStateHeader(badTraceStateHeader)
@@ -315,7 +315,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '190'
const badTraceStateHeader =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827,234234@foobar'
const valid = traceContext._validateAndParseTraceStateHeader(badTraceStateHeader)
@@ -331,7 +331,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '190'
const badTimestamp =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-,234234@foo=bar'
const valid = traceContext._validateAndParseTraceStateHeader(badTimestamp)
assert.equal(valid.entryFound, true)
@@ -342,7 +342,7 @@ test('TraceContext', async function (t) {
const { agent, traceContext } = ctx.nr
agent.config.trusted_account_key = '190'
const goodTraceStateHeader =
- /* eslint-disable-next-line max-len */
+
'190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05---1563574856827,234234@foo=bar'
const valid = traceContext._validateAndParseTraceStateHeader(goodTraceStateHeader)
assert.ok(valid)
@@ -410,7 +410,7 @@ test('TraceContext', async function (t) {
const acctKey = '190'
agent.config.trusted_account_key = acctKey
const duplicateAcctTraceState =
- /* eslint-disable-next-line max-len */
+
'42@bar=foo,190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827,190@nr=bar'
const traceparent = '00-00015f9f95352ad550284c27c5d3084c-00f067aa0ba902b7-00'
const appId = '109354'
@@ -449,7 +449,7 @@ test('TraceContext', async function (t) {
const acctKey = '190'
agent.config.trusted_account_key = acctKey
const duplicateAcctTraceState =
- /* eslint-disable-next-line max-len */
+
'190@nr=bar,42@bar=foo,190@nr=0-0-709288-8599547-f85f42fd82a4cf1d-164d3b4b0d09cb05-1-0.789-1563574856827'
const valid = traceContext._validateAndParseTraceStateHeader(duplicateAcctTraceState)
@@ -697,7 +697,6 @@ test('TraceContext', async function (t) {
assert.equal(supportabilitySpy.callCount, 1)
- // eslint-disable-next-line max-len
assert.equal(
supportabilitySpy.firstCall.args[0],
'TraceContext/TraceState/Accept/Exception'
@@ -956,7 +955,7 @@ test('TraceContext', async function (t) {
assert.ok(!headers.tracestate)
assert.equal(supportabilitySpy.callCount, 2)
- // eslint-disable-next-line max-len
+
assert.equal(
supportabilitySpy.firstCall.args[0],
'TraceContext/TraceState/Create/Exception'
@@ -982,7 +981,7 @@ test('TraceContext', async function (t) {
assert.ok(!headers.tracestate)
assert.equal(supportabilitySpy.callCount, 2)
- // eslint-disable-next-line max-len
+
assert.equal(
supportabilitySpy.firstCall.args[0],
'TraceContext/TraceState/Create/Exception'
@@ -1009,7 +1008,7 @@ test('TraceContext', async function (t) {
assert.ok(!headers.tracestate)
assert.equal(supportabilitySpy.callCount, 2)
- // eslint-disable-next-line max-len
+
assert.equal(
supportabilitySpy.firstCall.args[0],
'TraceContext/TraceState/Create/Exception'
diff --git a/test/unit/environment.test.js b/test/unit/environment.test.js
index 79d35a30ef..568241355c 100644
--- a/test/unit/environment.test.js
+++ b/test/unit/environment.test.js
@@ -242,7 +242,7 @@ test('should not crash when given a file in NODE_PATH', (t, end) => {
}
const opt = {
- env: env,
+ env,
stdio: 'inherit',
cwd: path.join(__dirname, '..')
}
@@ -360,6 +360,6 @@ test('when NODE_ENV is "production"', async (t) => {
assert.equal(
find(nSettings, 'NODE_ENV'),
'production',
- `should save the NODE_ENV value in the environment settings`
+ 'should save the NODE_ENV value in the environment settings'
)
})
diff --git a/test/unit/errors/error-collector.test.js b/test/unit/errors/error-collector.test.js
index 7ca9deda95..948649095f 100644
--- a/test/unit/errors/error-collector.test.js
+++ b/test/unit/errors/error-collector.test.js
@@ -1271,6 +1271,7 @@ test('Errors', async (t) => {
try {
api.startBackgroundTransaction('job', () => {
+ // eslint-disable-next-line no-throw-literal
throw null
})
} catch (err) {
@@ -2029,7 +2030,7 @@ test('Errors', async (t) => {
server.listen(0, 'localhost', () => {
const port = server.address().port
- http.get({ port: port, host: 'localhost' })
+ http.get({ port, host: 'localhost' })
})
agent.on('transactionFinished', function (tx) {
diff --git a/test/unit/errors/error-event-aggregator.test.js b/test/unit/errors/error-event-aggregator.test.js
index 77c9bff429..3065cf3c0e 100644
--- a/test/unit/errors/error-event-aggregator.test.js
+++ b/test/unit/errors/error-event-aggregator.test.js
@@ -51,7 +51,7 @@ test('Error Event Aggregator', async (t) => {
await t.test('toPayload() should return json format of data', (t) => {
const { errorEventAggregator } = t.nr
const expectedMetrics = { reservoir_size: LIMIT, events_seen: 1 }
- const rawErrorEvent = [{ 'type': 'TransactionError', 'error.class': 'class' }, {}, {}]
+ const rawErrorEvent = [{ type: 'TransactionError', 'error.class': 'class' }, {}, {}]
errorEventAggregator.add(rawErrorEvent)
diff --git a/test/unit/errors/error-trace-aggregator.test.js b/test/unit/errors/error-trace-aggregator.test.js
index f1bfa3baf7..48c7da868c 100644
--- a/test/unit/errors/error-trace-aggregator.test.js
+++ b/test/unit/errors/error-trace-aggregator.test.js
@@ -81,6 +81,7 @@ test('Error Trace Aggregator', async (t) => {
errorTraceAggregator.add(rawErrorTrace)
errorTraceAggregator._toPayload((err, payload) => {
+ assert.ifError(err)
assert.equal(payload.length, 2, 'payload should have two elements')
const [runId, errorTraceData] = payload
diff --git a/test/unit/grpc/connection.test.js b/test/unit/grpc/connection.test.js
index a71b5e34ff..bfc7fe2e71 100644
--- a/test/unit/grpc/connection.test.js
+++ b/test/unit/grpc/connection.test.js
@@ -28,10 +28,6 @@ const fakeTraceObserverConfig = {
}
class FakeStreamer extends EventEmitter {
- constructor() {
- super()
- }
-
emitStatus(status) {
this.emit('status', status)
}
@@ -45,8 +41,8 @@ const createMetricAggregatorForTests = () => {
{
// runId: RUN_ID,
apdexT: 0.5,
- mapper: mapper,
- normalizer: normalizer
+ mapper,
+ normalizer
},
{},
{ add() {} }
diff --git a/test/unit/header-attributes.test.js b/test/unit/header-attributes.test.js
index 470552562c..3d383b672f 100644
--- a/test/unit/header-attributes.test.js
+++ b/test/unit/header-attributes.test.js
@@ -120,8 +120,8 @@ test('#collectRequestHeaders', async (t) => {
agent.config.allow_all_headers = false
const headers = {
- 'invalid': 'header',
- 'referer': 'valid-referer',
+ invalid: 'header',
+ referer: 'valid-referer',
'content-type': 'valid-type'
}
@@ -145,8 +145,8 @@ test('#collectRequestHeaders', async (t) => {
agent.config.allow_all_headers = false
const headers = {
- 'invalid': 'header',
- 'referer': 'valid-referer',
+ invalid: 'header',
+ referer: 'valid-referer',
'content-type': 'valid-type'
}
@@ -175,8 +175,8 @@ test('#collectRequestHeaders', async (t) => {
agent.config.allow_all_headers = true
const headers = {
- 'valid': 'header',
- 'referer': 'valid-referer',
+ valid: 'header',
+ referer: 'valid-referer',
'content-type': 'valid-type',
'X-filtered-out': 'invalid'
}
@@ -208,7 +208,7 @@ test('#collectResponseHeaders', async (t) => {
agent.config.allow_all_headers = false
const headers = {
- 'invalid': 'header',
+ invalid: 'header',
'content-type': 'valid-type'
}
@@ -230,7 +230,7 @@ test('#collectResponseHeaders', async (t) => {
agent.config.allow_all_headers = true
const headers = {
- 'valid': 'header',
+ valid: 'header',
'content-type': 'valid-type',
'X-filtered-out': 'invalid'
}
diff --git a/test/unit/header-processing.test.js b/test/unit/header-processing.test.js
index fb343afcee..1260f65b65 100644
--- a/test/unit/header-processing.test.js
+++ b/test/unit/header-processing.test.js
@@ -21,9 +21,9 @@ test('#getContentLengthFromHeaders', async (t) => {
// does it ignore other headers?
assert.equal(
headerProcessing.getContentLengthFromHeaders({
- 'zip': 'zap',
+ zip: 'zap',
'Content-Length': 100,
- 'foo': 'bar'
+ foo: 'bar'
}),
100
)
@@ -34,10 +34,10 @@ test('#getContentLengthFromHeaders', async (t) => {
// replacing
assert.equal(
headerProcessing.getContentLengthFromHeaders({
- 'zip': 'zap',
+ zip: 'zap',
'content-length': 50,
'Content-Length': 100,
- 'foo': 'bar'
+ foo: 'bar'
}),
50
)
diff --git a/test/unit/index.test.js b/test/unit/index.test.js
index ca3a4c3199..a529d94103 100644
--- a/test/unit/index.test.js
+++ b/test/unit/index.test.js
@@ -213,7 +213,7 @@ test('index tests', async (t) => {
function loadIndex(ctx) {
return proxyquire('../../index', {
- 'worker_threads': ctx.nr.workerThreadsStub,
+ worker_threads: ctx.nr.workerThreadsStub,
'./lib/util/process-version': ctx.nr.processVersionStub,
'./lib/logger': ctx.nr.loggerMock,
'./lib/agent': ctx.nr.MockAgent,
diff --git a/test/unit/instrumentation/connect.test.js b/test/unit/instrumentation/connect.test.js
index 99557dcd74..5485a8cc3c 100644
--- a/test/unit/instrumentation/connect.test.js
+++ b/test/unit/instrumentation/connect.test.js
@@ -3,8 +3,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
-/* eslint-disable strict */
-
const test = require('node:test')
const assert = require('node:assert')
const helper = require('../../lib/agent_helper')
@@ -58,7 +56,7 @@ test('for Connect 1 (stubbed)', async function (t) {
prototype: {
use: function (route, middleware) {
if (this.stack && typeof middleware === 'function') {
- this.stack.push({ route: route, handle: middleware })
+ this.stack.push({ route, handle: middleware })
} else if (this.stack && typeof route === 'function') {
this.stack.push({ route: '', handle: route })
}
@@ -83,7 +81,7 @@ test('for Connect 1 (stubbed)', async function (t) {
await t.test("shouldn't throw if there's no middleware chain", async function (t) {
const { app } = t.nr
assert.doesNotThrow(() => {
- app.use.call(app, nextulator)
+ app.use(nextulator)
})
})
@@ -92,7 +90,7 @@ test('for Connect 1 (stubbed)', async function (t) {
app.stack = []
assert.doesNotThrow(function () {
- app.use.call(app, '/')
+ app.use('/')
})
})
@@ -103,7 +101,7 @@ test('for Connect 1 (stubbed)', async function (t) {
app.stack = []
assert.doesNotThrow(function () {
- app.use.call(app, '/', 'hamburglar')
+ app.use('/', 'hamburglar')
})
}
)
@@ -116,11 +114,11 @@ test('for Connect 1 (stubbed)', async function (t) {
app.stack = []
- app.use.call(app, '/', nextulator)
- app.use.call(app, '/test', nextulator)
- app.use.call(app, '/error1', errulator)
- app.use.call(app, '/help', nextulator)
- app.use.call(app, '/error2', errulator)
+ app.use('/', nextulator)
+ app.use('/test', nextulator)
+ app.use('/error1', errulator)
+ app.use('/help', nextulator)
+ app.use('/error2', errulator)
assert.equal(app.stack.length, 5)
})
@@ -128,33 +126,28 @@ test('for Connect 1 (stubbed)', async function (t) {
await t.test(
"shouldn't barf on functions with ES5 future reserved keyword names",
async function (t) {
+ // We are using a `new Function` here to get around:
+ // https://github.com/eslint/eslint/issues/19251
const { app } = t.nr
- // doin this on porpoise
- /* eslint-disable */
- function static(req, res, next) {
- return next()
- }
-
- app.stack = []
-
- assert.doesNotThrow(function () { app.use.call(app, '/', static); })
- })
+ const fn = new Function('function static(req, res, next) { return next() }')
+ app.stack = []
+ assert.doesNotThrow(function () { app.use('/', fn) })
+ })
})
-test("for Connect 2 (stubbed)", async function(t) {
+test('for Connect 2 (stubbed)', async function(t) {
t.beforeEach(function (ctx) {
ctx.nr = {}
const agent = helper.instrumentMockedAgent()
const stub = {
- version : '2.7.2',
- proto : {
- use : function (route, middleware) {
+ version: '2.7.2',
+ proto: {
+ use: function (route, middleware) {
if (this.stack && typeof middleware === 'function') {
- this.stack.push({route : route, handle : middleware})
- }
- else if (this.stack && typeof route === 'function') {
- this.stack.push({route : '', handle : route})
+ this.stack.push({ route, handle: middleware })
+ } else if (this.stack && typeof route === 'function') {
+ this.stack.push({ route: '', handle: route })
}
return this
@@ -175,21 +168,21 @@ test("for Connect 2 (stubbed)", async function(t) {
await t.test("shouldn't throw if there's no middleware chain", async function(t) {
const { app } = t.nr
- assert.doesNotThrow(function () { app.use.call(app, nextulator); })
+ assert.doesNotThrow(function () { app.use(nextulator) })
})
await t.test("shouldn't throw if there's a middleware link with no handler", async function(t) {
const { app } = t.nr
app.stack = []
- assert.doesNotThrow(function () { app.use.call(app, '/'); })
+ assert.doesNotThrow(function () { app.use('/') })
})
await t.test("shouldn't throw if there's a middleware link with a non-function handler", async function(t) {
const { app } = t.nr
app.stack = []
- assert.doesNotThrow(function () { app.use.call(app, '/', 'hamburglar'); })
+ assert.doesNotThrow(function () { app.use('/', 'hamburglar') })
})
await t.test("shouldn't break use", async function(t) {
@@ -200,26 +193,21 @@ test("for Connect 2 (stubbed)", async function(t) {
app.stack = []
- app.use.call(app, '/', nextulator)
- app.use.call(app, '/test', nextulator)
- app.use.call(app, '/error1', errulator)
- app.use.call(app, '/help', nextulator)
- app.use.call(app, '/error2', errulator)
+ app.use('/', nextulator)
+ app.use('/test', nextulator)
+ app.use('/error1', errulator)
+ app.use('/help', nextulator)
+ app.use('/error2', errulator)
assert.equal(app.stack.length, 5)
})
await t.test("shouldn't barf on functions with ES5 future reserved keyword names", async function(t) {
+ // We are using a `new Function` here to get around:
+ // https://github.com/eslint/eslint/issues/19251
const { app } = t.nr
- // doin this on porpoise
- function static(req, res, next) {
- return next()
- }
-
+ const fn = new Function('function static(req, res, next) { return next() }')
app.stack = []
-
- assert.doesNotThrow(function () { app.use.call(app, '/', static); })
+ assert.doesNotThrow(function () { app.use('/', fn) })
})
})
-
-/* eslint-enable strict */
diff --git a/test/unit/instrumentation/core/domain.test.js b/test/unit/instrumentation/core/domain.test.js
index b3809c4d7c..3e29067d45 100644
--- a/test/unit/instrumentation/core/domain.test.js
+++ b/test/unit/instrumentation/core/domain.test.js
@@ -37,7 +37,7 @@ test('Domains', async (t) => {
await t.test('should retain transaction scope on error events', (t, end) => {
const { agent, tasks } = t.nr
- // eslint-disable-next-line node/no-deprecated-api
+ // eslint-disable-next-line n/no-deprecated-api
const domain = require('domain')
const d = domain.create()
diff --git a/test/unit/instrumentation/core/fixtures/unhandled-rejection.js b/test/unit/instrumentation/core/fixtures/unhandled-rejection.js
index 7e86681705..7a4a41c932 100644
--- a/test/unit/instrumentation/core/fixtures/unhandled-rejection.js
+++ b/test/unit/instrumentation/core/fixtures/unhandled-rejection.js
@@ -10,11 +10,11 @@ const agent = helper.instrumentMockedAgent()
process.once('unhandledRejection', function () {})
helper.runInTransaction(agent, function (transaction) {
- Promise.reject('test rejection')
+ Promise.reject(Error('test rejection'))
setTimeout(function () {
assert.equal(transaction.exceptions.length, 0)
- // eslint-disable-next-line no-process-exit
+
process.exit(0)
}, 15)
})
diff --git a/test/unit/instrumentation/elasticsearch.test.js b/test/unit/instrumentation/elasticsearch.test.js
index 02ed7d6bda..3ce5341fba 100644
--- a/test/unit/instrumentation/elasticsearch.test.js
+++ b/test/unit/instrumentation/elasticsearch.test.js
@@ -22,7 +22,7 @@ test('parsePath should behave as expected', async (t) => {
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
const expectedOp = `index.${m.expected}`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
assert.equal(operation, expectedOp, 'operation should include index and method')
})
})
@@ -30,9 +30,9 @@ test('parsePath should behave as expected', async (t) => {
const path = '/indexName/_search'
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `search`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
- assert.equal(operation, expectedOp, `operation should be 'search'`)
+ const expectedOp = 'search'
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
+ assert.equal(operation, expectedOp, "operation should be 'search'")
})
})
await t.test('search of all indices', async function () {
@@ -43,7 +43,7 @@ test('parsePath should behave as expected', async (t) => {
return
}
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `search`
+ const expectedOp = 'search'
assert.equal(collection, 'any', 'index should be `any`')
assert.equal(operation, expectedOp, `operation should match ${expectedOp}`)
})
@@ -53,7 +53,7 @@ test('parsePath should behave as expected', async (t) => {
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
const expectedOp = `doc.${m.expected}`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
assert.equal(operation, expectedOp, `operation should match ${expectedOp}`)
})
})
@@ -72,7 +72,7 @@ test('parsePath should behave as expected', async (t) => {
const path = {}
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `unknown`
+ const expectedOp = 'unknown'
assert.equal(collection, 'any', 'index should be `any`')
assert.equal(operation, expectedOp, `operation should match '${expectedOp}'`)
})
diff --git a/test/unit/instrumentation/generic-pool.test.js b/test/unit/instrumentation/generic-pool.test.js
index 415c5239b3..1c57872377 100644
--- a/test/unit/instrumentation/generic-pool.test.js
+++ b/test/unit/instrumentation/generic-pool.test.js
@@ -55,33 +55,26 @@ test('agent instrumentation of generic-pool', async function (t) {
}
assert.equal(nop.length, 0)
- /* eslint-disable new-cap */
mockPool.Pool(0).acquire(nop)
- /* eslint-enable new-cap */
})
await t.test("must preserve 'callback.length === 1' to keep generic-pool happy", (t, end) => {
- // eslint-disable-next-line no-unused-vars
const nop = function (client) {
end()
}
assert.equal(nop.length, 1)
- /* eslint-disable new-cap */
mockPool.Pool(1).acquire(nop)
- /* eslint-enable new-cap */
})
await t.test("must preserve 'callback.length === 2' to keep generic-pool happy", (t, end) => {
- // eslint-disable-next-line no-unused-vars
+ // eslint-disable-next-line n/handle-callback-err
const nop = function (error, client) {
end()
}
assert.equal(nop.length, 2)
- /* eslint-disable new-cap */
mockPool.Pool(2).acquire(nop)
- /* eslint-enable new-cap */
})
})
})
diff --git a/test/unit/instrumentation/http/http.test.js b/test/unit/instrumentation/http/http.test.js
index b29936c14b..4124426434 100644
--- a/test/unit/instrumentation/http/http.test.js
+++ b/test/unit/instrumentation/http/http.test.js
@@ -219,8 +219,8 @@ test('built-in http module instrumentation', async (t) => {
path: '/path',
method: 'GET',
headers: {
- 'invalid': 'header',
- 'referer': 'valid-referer',
+ invalid: 'header',
+ referer: 'valid-referer',
'content-type': 'valid-type'
}
},
@@ -254,8 +254,8 @@ test('built-in http module instrumentation', async (t) => {
path: '/path',
method: 'GET',
headers: {
- 'valid': 'header',
- 'referer': 'valid-referer',
+ valid: 'header',
+ referer: 'valid-referer',
'content-type': 'valid-type',
'X-filtered-out': 'invalid'
}
@@ -352,7 +352,7 @@ test('built-in http module instrumentation', async (t) => {
path: '/path',
method: 'GET',
headers: {
- 'referer': refererUrl,
+ referer: refererUrl,
'User-Agent': userAgent
}
},
@@ -360,6 +360,7 @@ test('built-in http module instrumentation', async (t) => {
)
function finish(err, statusCode, body) {
+ assert.ifError(err)
const { transaction, transaction2 } = t.nr
const attributes = transaction.trace.attributes.get(DESTINATIONS.TRANS_TRACE)
const segment = transaction.baseSegment
@@ -447,7 +448,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers })
+ http.get({ host: 'localhost', port, headers })
})
helper.startServerWithRandomPortRetry(server)
@@ -472,7 +473,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers })
+ http.get({ host: 'localhost', port, headers })
})
helper.startServerWithRandomPortRetry(server)
@@ -492,7 +493,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers })
+ http.get({ host: 'localhost', port, headers })
})
helper.startServerWithRandomPortRetry(server)
@@ -540,7 +541,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', () => {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers })
+ http.get({ host: 'localhost', port, headers })
})
helper.startServerWithRandomPortRetry(server)
@@ -579,7 +580,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', () => {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(res.headers['x-newrelic-app-data'], encKey)
)
@@ -607,7 +608,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(res.headers['x-newrelic-app-data'], encKey)
)
@@ -631,7 +632,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
assert.ok(!res.headers['x-newrelic-app-data'])
res.resume()
server.close(end)
@@ -653,7 +654,7 @@ test('built-in http module instrumentation', async (t) => {
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(res.headers['x-newrelic-app-data'], encKey)
)
@@ -703,13 +704,13 @@ test('built-in http module instrumentation', async (t) => {
})
const headers = {
- traceparent: traceparent,
- tracestate: tracestate
+ traceparent,
+ tracestate
}
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
res.resume()
server.close(end)
})
@@ -735,12 +736,12 @@ test('built-in http module instrumentation', async (t) => {
})
const headers = {
- traceparent: traceparent
+ traceparent
}
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
res.resume()
server.close(end)
})
@@ -766,13 +767,13 @@ test('built-in http module instrumentation', async (t) => {
})
const headers = {
- traceparent: traceparent,
- tracestate: tracestate
+ traceparent,
+ tracestate
}
server.on('listening', function () {
const port = server.address().port
- http.get({ host: 'localhost', port: port, headers: headers }, function (res) {
+ http.get({ host: 'localhost', port, headers }, function (res) {
res.resume()
server.close(end)
})
@@ -817,7 +818,7 @@ test('built-in http module instrumentation', async (t) => {
addSegment({ agent }) // Add web segment so everything works properly
const port = server.address().port
- const req = http.request({ host: 'localhost', port: port }, function (res) {
+ const req = http.request({ host: 'localhost', port }, function (res) {
assert.equal(req.getHeader(NEWRELIC_ID_HEADER), 'o123')
res.resume()
agent.getTransaction().end()
@@ -843,7 +844,7 @@ test('built-in http module instrumentation', async (t) => {
)
const port = server.address().port
- const req = http.get({ host: 'localhost', port: port }, function (res) {
+ const req = http.get({ host: 'localhost', port }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(req.getHeader(NEWRELIC_TRANSACTION_HEADER), encKey)
)
@@ -868,7 +869,7 @@ test('built-in http module instrumentation', async (t) => {
transaction.tripId = null
const port = server.address().port
- const req = http.get({ host: 'localhost', port: port }, function (res) {
+ const req = http.get({ host: 'localhost', port }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(req.getHeader(NEWRELIC_TRANSACTION_HEADER), encKey)
)
@@ -897,7 +898,7 @@ test('built-in http module instrumentation', async (t) => {
)
const port = server.address().port
- const req = http.get({ host: 'localhost', port: port }, function (res) {
+ const req = http.get({ host: 'localhost', port }, function (res) {
const data = JSON.parse(
hashes.deobfuscateNameUsingKey(req.getHeader(NEWRELIC_TRANSACTION_HEADER), encKey)
)
@@ -925,7 +926,7 @@ test('built-in http module instrumentation', async (t) => {
const port = server.address().port
http
- .get({ host: 'localhost', port: port }, function (res) {
+ .get({ host: 'localhost', port }, function (res) {
assert.deepEqual(transaction.pathHashes, [pathHash])
res.resume()
transaction.end()
@@ -974,7 +975,7 @@ test('built-in http module instrumentation', async (t) => {
const port = server.address().port
const req = http.request(
- { host: 'localhost', port: port, headers: { a: 1, b: 2 } },
+ { host: 'localhost', port, headers: { a: 1, b: 2 } },
function (res) {
res.resume()
arrayRequest()
@@ -990,7 +991,7 @@ test('built-in http module instrumentation', async (t) => {
const req = http.request(
{
host: 'localhost',
- port: port,
+ port,
headers: [
['a', 1],
['b', 2]
@@ -1011,7 +1012,7 @@ test('built-in http module instrumentation', async (t) => {
const req = http.request(
{
host: 'localhost',
- port: port,
+ port,
headers: { a: 1, b: 2, expect: '100-continue' }
},
function (res) {
diff --git a/test/unit/instrumentation/http/outbound.test.js b/test/unit/instrumentation/http/outbound.test.js
index d8c9785577..db8e95b6a2 100644
--- a/test/unit/instrumentation/http/outbound.test.js
+++ b/test/unit/instrumentation/http/outbound.test.js
@@ -127,10 +127,10 @@ test('instrumentOutbound', async (t) => {
assert.deepEqual(
transaction.trace.root.children[0].attributes.get(DESTINATIONS.SPAN_EVENT),
{
- 'hostname': HOSTNAME,
- 'port': PORT,
- 'url': `http://${HOSTNAME}:${PORT}/asdf`,
- 'procedure': 'GET',
+ hostname: HOSTNAME,
+ port: PORT,
+ url: `http://${HOSTNAME}:${PORT}/asdf`,
+ procedure: 'GET',
'request.parameters.a': 'b',
'request.parameters.another': 'yourself',
'request.parameters.thing': true,
@@ -342,7 +342,7 @@ test('should add data from cat header to segment', async (t) => {
const port = server.address().port
http
- .get({ host: 'localhost', port: port }, function (res) {
+ .get({ host: 'localhost', port }, function (res) {
const segment = agent.tracer.getTransaction().trace.root.children[0]
assert.equal(segment.catId, '123#456')
@@ -364,7 +364,7 @@ test('should add data from cat header to segment', async (t) => {
const port = server.address().port
http
- .get({ host: 'localhost', port: port }, function (res) {
+ .get({ host: 'localhost', port }, function (res) {
const segment = agent.tracer.getTransaction().trace.root.children[0]
assert.equal(segment.catId, '123#456')
diff --git a/test/unit/instrumentation/http/queue-time.test.js b/test/unit/instrumentation/http/queue-time.test.js
index bcf027980d..974928daa8 100644
--- a/test/unit/instrumentation/http/queue-time.test.js
+++ b/test/unit/instrumentation/http/queue-time.test.js
@@ -28,6 +28,7 @@ test('built-in http queueTime', async (t) => {
helper.unloadAgent(ctx.nr.agent)
})
+ // eslint-disable-next-line no-template-curly-in-string
await t.test('header should allow t=${time} style headers', (t, end) => {
const { agent, testDate } = t.nr
const server = http.createServer(function createServerCb(request, response) {
@@ -41,7 +42,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-request-start': 't=' + (testDate - 10)
}
@@ -64,7 +65,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-request-start': 'alskdjf'
}
@@ -88,7 +89,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-request-start': testDate - 10
}
@@ -113,7 +114,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-queue-start': testDate - 10
}
@@ -137,7 +138,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-request-start': (testDate - 10) * 1e3
}
@@ -161,7 +162,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-queue-start': (testDate - 10) * 1e6
}
@@ -185,7 +186,7 @@ test('built-in http queueTime', async (t) => {
const port = server.address().port
const opts = {
host: 'localhost',
- port: port,
+ port,
headers: {
'x-request-start': (testDate - 10) / 1e3
}
diff --git a/test/unit/instrumentation/nextjs/next-server.test.js b/test/unit/instrumentation/nextjs/next-server.test.js
index 9eb6619cdf..e7fd5a71e8 100644
--- a/test/unit/instrumentation/nextjs/next-server.test.js
+++ b/test/unit/instrumentation/nextjs/next-server.test.js
@@ -14,7 +14,7 @@ test('middleware tracking', async (t) => {
t.beforeEach((ctx) => {
ctx.nr = {}
const agent = helper.loadMockedAgent()
- const Shim = require(`../../../../lib/shim/webframework-shim`)
+ const Shim = require('../../../../lib/shim/webframework-shim')
const shim = new Shim(agent, './next-server')
sinon.stub(shim, 'require')
sinon.stub(shim, 'setFramework')
diff --git a/test/unit/instrumentation/opensearch.test.js b/test/unit/instrumentation/opensearch.test.js
index 3589385a26..bc3cbb3530 100644
--- a/test/unit/instrumentation/opensearch.test.js
+++ b/test/unit/instrumentation/opensearch.test.js
@@ -40,7 +40,7 @@ test('parsePath should behave as expected', async (t) => {
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
const expectedOp = `index.${m.expected}`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
assert.equal(operation, expectedOp, 'operation should include index and method')
})
})
@@ -48,9 +48,9 @@ test('parsePath should behave as expected', async (t) => {
const path = '/indexName/_search'
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `search`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
- assert.equal(operation, expectedOp, `operation should be 'search'`)
+ const expectedOp = 'search'
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
+ assert.equal(operation, expectedOp, "operation should be 'search'")
})
})
await t.test('search of all indices', async function () {
@@ -61,7 +61,7 @@ test('parsePath should behave as expected', async (t) => {
return
}
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `search`
+ const expectedOp = 'search'
assert.equal(collection, 'any', 'index should be `any`')
assert.equal(operation, expectedOp, `operation should match ${expectedOp}`)
})
@@ -71,7 +71,7 @@ test('parsePath should behave as expected', async (t) => {
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
const expectedOp = `doc.${m.expected}`
- assert.equal(collection, 'indexName', `index should be 'indexName'`)
+ assert.equal(collection, 'indexName', "index should be 'indexName'")
assert.equal(operation, expectedOp, `operation should match ${expectedOp}`)
})
})
@@ -90,7 +90,7 @@ test('parsePath should behave as expected', async (t) => {
const path = {}
methods.forEach((m) => {
const { collection, operation } = parsePath(path, m.name)
- const expectedOp = `unknown`
+ const expectedOp = 'unknown'
assert.equal(collection, 'any', 'index should be `any`')
assert.equal(operation, expectedOp, `operation should match '${expectedOp}'`)
})
diff --git a/test/unit/instrumentation/postgresql.test.js b/test/unit/instrumentation/postgresql.test.js
index 83651f34be..4635f409de 100644
--- a/test/unit/instrumentation/postgresql.test.js
+++ b/test/unit/instrumentation/postgresql.test.js
@@ -65,6 +65,7 @@ test('Lazy loading of native PG client', async (t) => {
initialize(agent, mockPg, 'pg', shim)
assert.doesNotThrow(function pleaseDoNotThrow() {
+ // eslint-disable-next-line no-unused-expressions
mockPg.native
})
diff --git a/test/unit/instrumentation/undici.test.js b/test/unit/instrumentation/undici.test.js
index 1db52ab1c8..95d81eb356 100644
--- a/test/unit/instrumentation/undici.test.js
+++ b/test/unit/instrumentation/undici.test.js
@@ -106,7 +106,7 @@ test('undici instrumentation', async function (t) {
channels.create.publish({ request: { origin: HOST, path: '/foo-2', addHeader } })
assert.equal(addHeader.callCount, 2)
assert.equal(addHeader.args[0][0], 'traceparent')
- assert.match(addHeader.args[0][1], /^[\w\d\-]{55}$/)
+ assert.match(addHeader.args[0][1], /^[\w-]{55}$/)
assert.deepEqual(addHeader.args[1], ['newrelic', ''])
tx.end()
end()
@@ -122,7 +122,7 @@ test('undici instrumentation', async function (t) {
channels.create.publish({ request: { origin: HOST, path: '/foo-2', addHeader } })
assert.equal(addHeader.callCount, 1)
assert.equal(addHeader.args[0][0], 'X-NewRelic-Transaction')
- assert.match(addHeader.args[0][1], /^[\w\d/-]{60,80}={0,2}$/)
+ assert.match(addHeader.args[0][1], /^[\w/-]{60,80}={0,2}$/)
tx.end()
end()
})
diff --git a/test/unit/llm-events/aws-bedrock/chat-completion-message.test.js b/test/unit/llm-events/aws-bedrock/chat-completion-message.test.js
index 4e484e84e6..9dcc95f41a 100644
--- a/test/unit/llm-events/aws-bedrock/chat-completion-message.test.js
+++ b/test/unit/llm-events/aws-bedrock/chat-completion-message.test.js
@@ -35,7 +35,7 @@ test.beforeEach((ctx) => {
get(key) {
assert.equal(key, TRANS_SCOPE)
return {
- ['llm.conversation_id']: 'conversation-1'
+ 'llm.conversation_id': 'conversation-1'
}
}
}
diff --git a/test/unit/llm-events/aws-bedrock/chat-completion-summary.test.js b/test/unit/llm-events/aws-bedrock/chat-completion-summary.test.js
index 0bc79f281f..81d111cb16 100644
--- a/test/unit/llm-events/aws-bedrock/chat-completion-summary.test.js
+++ b/test/unit/llm-events/aws-bedrock/chat-completion-summary.test.js
@@ -28,7 +28,7 @@ test.beforeEach((ctx) => {
get(key) {
assert.equal(key, TRANS_SCOPE)
return {
- ['llm.conversation_id']: 'conversation-1'
+ 'llm.conversation_id': 'conversation-1'
}
}
}
diff --git a/test/unit/llm-events/aws-bedrock/embedding.test.js b/test/unit/llm-events/aws-bedrock/embedding.test.js
index 801e7f64a4..35a4e1d4d7 100644
--- a/test/unit/llm-events/aws-bedrock/embedding.test.js
+++ b/test/unit/llm-events/aws-bedrock/embedding.test.js
@@ -35,7 +35,7 @@ test.beforeEach((ctx) => {
get(key) {
assert.equal(key, TRANS_SCOPE)
return {
- ['llm.conversation_id']: 'conversation-1'
+ 'llm.conversation_id': 'conversation-1'
}
}
}
diff --git a/test/unit/llm-events/aws-bedrock/event.test.js b/test/unit/llm-events/aws-bedrock/event.test.js
index 1d062b0bee..1703c007a5 100644
--- a/test/unit/llm-events/aws-bedrock/event.test.js
+++ b/test/unit/llm-events/aws-bedrock/event.test.js
@@ -28,7 +28,7 @@ test.beforeEach((ctx) => {
get(key) {
assert.equal(key, TRANS_SCOPE)
return {
- ['llm.conversation_id']: 'conversation-1',
+ 'llm.conversation_id': 'conversation-1',
omit: 'me'
}
}
diff --git a/test/unit/llm-events/aws-bedrock/stream-handler.test.js b/test/unit/llm-events/aws-bedrock/stream-handler.test.js
index a9762dfafe..292a437cdb 100644
--- a/test/unit/llm-events/aws-bedrock/stream-handler.test.js
+++ b/test/unit/llm-events/aws-bedrock/stream-handler.test.js
@@ -62,8 +62,7 @@ test.beforeEach((ctx) => {
ctx.nr.chunks = [{ foo: 'foo' }]
- /* eslint-disable prettier/prettier */ // It doesn't like the IIFE syntax
- ctx.nr.stream = (async function* originalStream() {
+ ctx.nr.stream = (async function * originalStream() {
const encoder = new TextEncoder()
for (const chunk of ctx.nr.chunks) {
const json = JSON.stringify(chunk)
@@ -71,7 +70,6 @@ test.beforeEach((ctx) => {
yield { chunk: { bytes } }
}
}())
- /* eslint-enable prettier/prettier */
})
test('unrecognized or unhandled model uses original stream', async (t) => {
diff --git a/test/unit/llm-events/error.test.js b/test/unit/llm-events/error.test.js
index cd76a34d78..684a91458d 100644
--- a/test/unit/llm-events/error.test.js
+++ b/test/unit/llm-events/error.test.js
@@ -18,10 +18,10 @@ test('LlmErrorMessage', async () => {
'error.message': undefined,
'error.code': 'insufficient_quota',
'error.param': 'test-param',
- 'completion_id': undefined,
- 'embedding_id': undefined,
- 'vector_store_id': undefined,
- 'tool_id': undefined
+ completion_id: undefined,
+ embedding_id: undefined,
+ vector_store_id: undefined,
+ tool_id: undefined
}
assert.ok(errorMsg.toString(), 'LlmErrorMessage')
assert.equal(errorMsg['http.statusCode'], expected['http.statusCode'])
diff --git a/test/unit/llm-events/langchain/event.test.js b/test/unit/llm-events/langchain/event.test.js
index 2fe2d064d8..6199deb7b9 100644
--- a/test/unit/llm-events/langchain/event.test.js
+++ b/test/unit/llm-events/langchain/event.test.js
@@ -19,7 +19,7 @@ test.beforeEach((ctx) => {
'llm.conversation_id': 'test-conversation',
'llm.foo': 'bar',
'llm.bar': 'baz',
- 'customKey': 'customValue'
+ customKey: 'customValue'
}
}
}
diff --git a/test/unit/llm-events/openai/common.js b/test/unit/llm-events/openai/common.js
index 1eab799cb4..dcf655bbd8 100644
--- a/test/unit/llm-events/openai/common.js
+++ b/test/unit/llm-events/openai/common.js
@@ -46,17 +46,17 @@ const req = {
function getExpectedResult(tx, event, type, completionId) {
const trace = tx.trace.root
let expected = {
- 'id': event.id,
- 'appName': 'New Relic for Node.js tests',
- 'request_id': 'req-id',
- 'trace_id': tx.traceId,
- 'span_id': trace.children[0].id,
+ id: event.id,
+ appName: 'New Relic for Node.js tests',
+ request_id: 'req-id',
+ trace_id: tx.traceId,
+ span_id: trace.children[0].id,
'response.model': 'gpt-3.5-turbo-0613',
- 'vendor': 'openai',
- 'ingest_source': 'Node'
+ vendor: 'openai',
+ ingest_source: 'Node'
}
const resKeys = {
- 'duration': trace.children[0].getDurationInMillis(),
+ duration: trace.children[0].getDurationInMillis(),
'request.model': 'gpt-3.5-turbo-0613',
'response.organization': 'new-relic',
'response.headers.llmVersion': '1.0.0',
@@ -78,10 +78,10 @@ function getExpectedResult(tx, event, type, completionId) {
expected = {
...expected,
...resKeys,
- ['request.max_tokens']: '1000000',
- ['request.temperature']: 'medium-rare',
- ['response.number_of_messages']: 3,
- ['response.choices.finish_reason']: 'stop',
+ 'request.max_tokens': '1000000',
+ 'request.temperature': 'medium-rare',
+ 'response.number_of_messages': 3,
+ 'response.choices.finish_reason': 'stop',
error: false
}
break
diff --git a/test/unit/metric/datastore-instance.test.js b/test/unit/metric/datastore-instance.test.js
index 243527b194..d82ade2a2c 100644
--- a/test/unit/metric/datastore-instance.test.js
+++ b/test/unit/metric/datastore-instance.test.js
@@ -47,9 +47,9 @@ test('Datastore instance metrics collected via the datastore shim', async functi
// Otherwise use 'default'.
let port = 'default'
if (
- test.hasOwnProperty('unix_socket') ||
- test.hasOwnProperty('database_path') ||
- test.hasOwnProperty('port')
+ Object.prototype.hasOwnProperty.call(test, 'unix_socket') ||
+ Object.prototype.hasOwnProperty.call(test, 'database_path') ||
+ Object.prototype.hasOwnProperty.call(test, 'port')
) {
port = test.unix_socket || test.database_path || test.port
}
diff --git a/test/unit/metric/metric-aggregator.test.js b/test/unit/metric/metric-aggregator.test.js
index d32d5dd036..9d83414d19 100644
--- a/test/unit/metric/metric-aggregator.test.js
+++ b/test/unit/metric/metric-aggregator.test.js
@@ -159,6 +159,7 @@ test('Metric Aggregator', async (t) => {
const expectedEndSeconds = EXPECTED_START_SECONDS + secondsToElapse
metricAggregator._toPayload((err, payload) => {
+ assert.ifError(err)
assert.equal(payload.length, 4)
const [runId, startTime, endTime, metricData] = payload
diff --git a/test/unit/metrics-recorder/distributed-trace.test.js b/test/unit/metrics-recorder/distributed-trace.test.js
index f41d95991a..b343a3e43e 100644
--- a/test/unit/metrics-recorder/distributed-trace.test.js
+++ b/test/unit/metrics-recorder/distributed-trace.test.js
@@ -39,9 +39,9 @@ function beforeEach(ctx) {
})
// Set the DT required data after config runs, since they'll be cleared when
// not in serverless_mode
- ;(agent.config.account_id = '1234'),
- (agent.config.primary_application_id = '5678'),
- (agent.config.trusted_account_key = '1234')
+ agent.config.account_id = '1234'
+ agent.config.primary_application_id = '5678'
+ agent.config.trusted_account_key = '1234'
ctx.nr.tx = new Transaction(agent)
ctx.nr.agent = agent
}
diff --git a/test/unit/metrics-recorder/http.test.js b/test/unit/metrics-recorder/http.test.js
index afacf786ed..b9b988e485 100644
--- a/test/unit/metrics-recorder/http.test.js
+++ b/test/unit/metrics-recorder/http.test.js
@@ -78,7 +78,8 @@ test('recordWeb when recording web transactions with distributed tracing enabled
agent.config.distributed_tracing.enabled = true
agent.config.cross_application_tracer.enabled = true
agent.config.account_id = '1234'
- ;(agent.config.primary_application_id = '5677'), (agent.config.trusted_account_key = '1234')
+ agent.config.primary_application_id = '5677'
+ agent.config.trusted_account_key = '1234'
const payload = trans._createDistributedTracePayload().text()
trans.isDistributedTrace = null
@@ -130,7 +131,8 @@ test('recordWeb when recording web transactions with distributed tracing enabled
agent.config.distributed_tracing.enabled = true
agent.config.cross_application_tracer.enabled = true
agent.config.account_id = '1234'
- ;(agent.config.primary_application_id = '5677'), (agent.config.trusted_account_key = '1234')
+ agent.config.primary_application_id = '5677'
+ agent.config.trusted_account_key = '1234'
record({
transaction: trans,
diff --git a/test/unit/name-state.test.js b/test/unit/name-state.test.js
index 896a7f8a01..3809b30d9d 100644
--- a/test/unit/name-state.test.js
+++ b/test/unit/name-state.test.js
@@ -51,6 +51,7 @@ test('should delete the name when reset', () => {
test('should handle regex paths', () => {
const state = new NameState('Nodejs', 'GET', '/', [])
+ // eslint-disable-next-line prefer-regex-literals
state.appendPath(new RegExp('regex1'))
state.appendPath('path1')
state.appendPath(/regex2/)
diff --git a/test/unit/rum.test.js b/test/unit/rum.test.js
index 5816fcba76..a44683f2a0 100644
--- a/test/unit/rum.test.js
+++ b/test/unit/rum.test.js
@@ -159,10 +159,10 @@ test('the RUM API', async function (t) {
const timingHeader = api.getBrowserTimingHeader()
assert.ok(
timingHeader.startsWith(
-          `window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,`
+          'window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,'
        )
      )
-      assert.ok(timingHeader.endsWith(`}; function() {}`))
+      assert.ok(timingHeader.endsWith('}; function() {}'))
end()
})
})
@@ -174,10 +174,10 @@ test('the RUM API', async function (t) {
const timingHeader = api.getBrowserTimingHeader({ allowTransactionlessInjection: true })
assert.ok(
timingHeader.startsWith(
-          `window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,`
+          'window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,'
        )
      )
-      assert.ok(timingHeader.endsWith(`}; function() {}`))
+      assert.ok(timingHeader.endsWith('}; function() {}'))
}
)
@@ -190,10 +190,10 @@ test('the RUM API', async function (t) {
const timingHeader = api.getBrowserTimingHeader({ nonce: '12345' })
assert.ok(
timingHeader.startsWith(
-          `window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,`
+          'window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,'
        )
      )
-      assert.ok(timingHeader.endsWith(`}; function() {}`))
+      assert.ok(timingHeader.endsWith('}; function() {}'))
end()
})
}
@@ -211,7 +211,7 @@ test('the RUM API', async function (t) {
'window.NREUM||(NREUM={});NREUM.info = {"licenseKey":1234,"applicationID":12345,'
)
)
- assert.ok(timingHeader.endsWith(`}; function() {}`))
+ assert.ok(timingHeader.endsWith('}; function() {}'))
end()
})
}
diff --git a/test/unit/sampler.test.js b/test/unit/sampler.test.js
index 704b3c1ab9..a1b8f8886a 100644
--- a/test/unit/sampler.test.js
+++ b/test/unit/sampler.test.js
@@ -276,7 +276,7 @@ function spinLoop(cb) {
setTimeout(function () {
let trash = []
for (let i = 0; i < 100000; ++i) {
- trash.push([{ i: i }])
+ trash.push([{ i }])
}
trash = null
diff --git a/test/unit/serverless/alb-event.test.js b/test/unit/serverless/alb-event.test.js
index 227ff8f16f..529dcb9f89 100644
--- a/test/unit/serverless/alb-event.test.js
+++ b/test/unit/serverless/alb-event.test.js
@@ -14,7 +14,7 @@ const helper = require('../../lib/agent_helper')
const AwsLambda = require('../../../lib/serverless/aws-lambda')
const { DESTINATIONS: ATTR_DEST } = require('../../../lib/config/attribute-filter')
-const { albEvent: albEvent } = require('./fixtures')
+const { albEvent } = require('./fixtures')
test.beforeEach((ctx) => {
// This env var suppresses console output we don't need to inspect.
diff --git a/test/unit/serverless/aws-lambda.test.js b/test/unit/serverless/aws-lambda.test.js
index 3f2d81b80d..4c6a0b4096 100644
--- a/test/unit/serverless/aws-lambda.test.js
+++ b/test/unit/serverless/aws-lambda.test.js
@@ -58,7 +58,7 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
done() {},
succeed() {},
fail() {},
- functionName: functionName,
+ functionName,
functionVersion: 'TestVersion',
invokedFunctionArn: 'arn:test:function',
memoryLimitInMB: '128',
@@ -417,14 +417,12 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
const apiGatewayProxyEvent = lambdaSampleEvents.apiGatewayProxyEvent
const wrappedHandler = awsLambda.patchLambdaHandler(() => {
- return new Promise((resolve) => {
- resolve({
- status: 200,
- statusCode: 200,
- statusDescription: 'Success',
- isBase64Encoded: false,
- headers: {}
- })
+ return Promise.resolve({
+ status: 200,
+ statusCode: 200,
+ statusDescription: 'Success',
+ isBase64Encoded: false,
+ headers: {}
})
})
@@ -910,14 +908,14 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
assert.equal(
agentAttributes[EVENTSOURCE_ARN],
'arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a'
- ) // eslint-disable-line max-len
+ )
assert.equal(agentAttributes[EVENTSOURCE_TYPE], 'alb')
assert.equal(
spanAttributes[EVENTSOURCE_ARN],
'arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a'
- ) // eslint-disable-line max-len
+ )
assert.equal(spanAttributes[EVENTSOURCE_TYPE], 'alb')
end()
@@ -1002,7 +1000,7 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
assert.equal(
agentAttributes[EVENTSOURCE_ARN],
'arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a'
- ) // eslint-disable-line max-len
+ )
assert.equal(agentAttributes[EVENTSOURCE_TYPE], 'alb')
@@ -1014,7 +1012,7 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
assert.equal(
spanAttributes[EVENTSOURCE_ARN],
'arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a'
- ) // eslint-disable-line max-len
+ )
assert.equal(spanAttributes[EVENTSOURCE_TYPE], 'alb')
@@ -1074,6 +1072,7 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
agent.on('harvestStarted', confirmErrorCapture)
const wrappedHandler = awsLambda.patchLambdaHandler((event, context, callback) => {
+ // eslint-disable-next-line n/no-callback-literal
callback('failed')
})
@@ -1411,6 +1410,8 @@ test('AwsLambda.patchLambdaHandler', async (t) => {
let transaction
const wrappedHandler = awsLambda.patchLambdaHandler(async () => {
transaction = agent.tracer.getTransaction()
+ // We need this promise to evaluate out-of-band in order to test the
+ // correct scenario.
// eslint-disable-next-line no-new
new Promise(() => {
assert.ok(transaction)
diff --git a/test/unit/serverless/fixtures.js b/test/unit/serverless/fixtures.js
index 3172e2c4ce..736a25a36b 100644
--- a/test/unit/serverless/fixtures.js
+++ b/test/unit/serverless/fixtures.js
@@ -221,13 +221,13 @@ const httpApiGatewayV2EventAlt = {
rawPath: '/dev/',
rawQueryString: '',
headers: {
- 'accept':
+ accept:
'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
'accept-encoding': 'gzip, deflate, br, zstd',
'accept-language': 'en-US,en;q=0.9',
'content-length': '0',
- 'host': 'zzz1234567890.execute-api.us-east-2.amazonaws.com',
- 'priority': 'u=0, i',
+ host: 'zzz1234567890.execute-api.us-east-2.amazonaws.com',
+ priority: 'u=0, i',
'sec-ch-ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
@@ -281,11 +281,11 @@ const albEvent = {
team: 'node agent'
},
headers: {
- 'accept': 'application/json;v=4',
+ accept: 'application/json;v=4',
'content-length': '35',
'content-type': 'application/json',
- 'header2': 'value1,value2',
- 'host': 'examplehost.example.com',
+ header2: 'value1,value2',
+ host: 'examplehost.example.com',
'x-amzn-trace-id': 'Root=1-1234567890',
'x-forwarded-for': '10.10.10.10',
'x-forwarded-port': '443',
@@ -295,10 +295,10 @@ const albEvent = {
body: '{"exampleProperty": "exampleValue"}',
isBase64Encoded: false,
rawHeaders: {
- 'accept': 'application/json;v=4',
+ accept: 'application/json;v=4',
'content-length': '35',
'content-type': 'application/json',
- 'host': 'examplehost.example.com',
+ host: 'examplehost.example.com',
'x-amzn-trace-id': 'Root=1-1234567890',
'x-forwarded-for': '10.10.10.10',
'x-forwarded-port': '443',
diff --git a/test/unit/serverless/lambda-sample-events.js b/test/unit/serverless/lambda-sample-events.js
index bc1469860f..2bf8342989 100644
--- a/test/unit/serverless/lambda-sample-events.js
+++ b/test/unit/serverless/lambda-sample-events.js
@@ -3,7 +3,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
-/* eslint-disable max-len */
'use strict'
const kinesisDataStreamEvent = {
@@ -227,7 +226,7 @@ const cloudFrontEvent = {
method: 'GET',
uri: '/picture.jpg',
headers: {
- 'host': [
+ host: [
{
key: 'Host',
value: 'd111111abcdef8.cloudfront.net'
@@ -265,7 +264,7 @@ const apiGatewayProxyEvent = {
path: '/test/hello',
httpMethod: 'GET',
headers: {
- 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+ Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, lzma, sdch, br',
'Accept-Language': 'en-US,en;q=0.8',
'CloudFront-Forwarded-Proto': 'https',
@@ -274,10 +273,10 @@ const apiGatewayProxyEvent = {
'CloudFront-Is-SmartTV-Viewer': 'false',
'CloudFront-Is-Tablet-Viewer': 'false',
'CloudFront-Viewer-Country': 'US',
- 'Host': 'wt6mne2s9k.execute-api.us-west-2.amazonaws.com',
+ Host: 'wt6mne2s9k.execute-api.us-west-2.amazonaws.com',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6)',
- 'Via': '1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)',
+ Via: '1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)',
'X-Amz-Cf-Id': 'nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==',
'X-Forwarded-For': '192.168.100.1, 192.168.1.1',
'X-Forwarded-Port': '443',
@@ -372,12 +371,12 @@ const albEvent = {
query: '1234ABCD'
},
headers: {
- 'accept':
+ accept:
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'accept-encoding': 'gzip',
'accept-language': 'en-US,en;q=0.9',
- 'connection': 'keep-alive',
- 'host': 'lambda-alb-123578498.us-east-2.elb.amazonaws.com',
+ connection: 'keep-alive',
+ host: 'lambda-alb-123578498.us-east-2.elb.amazonaws.com',
'upgrade-insecure-requests': '1',
'user-agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
@@ -392,14 +391,14 @@ const albEvent = {
}
const cloudwatchScheduled = {
- 'id': 'cdc73f9d-aea9-11e3-9d5a-835b769c0d9c',
+ id: 'cdc73f9d-aea9-11e3-9d5a-835b769c0d9c',
'detail-type': 'Scheduled Event',
- 'source': 'aws.events',
- 'account': '{{{account-id}}}',
- 'time': '1970-01-01T00:00:00Z',
- 'region': 'us-west-2',
- 'resources': ['arn:aws:events:us-west-2:123456789012:rule/ExampleRule'],
- 'detail': {}
+ source: 'aws.events',
+ account: '{{{account-id}}}',
+ time: '1970-01-01T00:00:00Z',
+ region: 'us-west-2',
+ resources: ['arn:aws:events:us-west-2:123456789012:rule/ExampleRule'],
+ detail: {}
}
const sesEvent = {
@@ -501,13 +500,13 @@ const albEventWithMultiValueParameters = {
httpMethod: 'GET',
headers: null,
multiValueHeaders: {
- 'accept': [
+ accept: [
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
],
'accept-encoding': ['gzip'],
'accept-language': ['en-US,en;q=0.9'],
- 'connection': ['keep-alive'],
- 'host': ['lambda-alb-123578498.us-east-2.elb.amazonaws.com'],
+ connection: ['keep-alive'],
+ host: ['lambda-alb-123578498.us-east-2.elb.amazonaws.com'],
'upgrade-insecure-requests': ['1'],
'user-agent': [
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
@@ -539,18 +538,18 @@ const albEventWithMultiValueParameters = {
}
module.exports = {
- kinesisDataStreamEvent: kinesisDataStreamEvent,
- s3PutEvent: s3PutEvent,
- snsEvent: snsEvent,
- dynamoDbUpdateEvent: dynamoDbUpdateEvent,
- codeCommitEvent: codeCommitEvent,
- cloudFrontEvent: cloudFrontEvent,
- cloudFormationCreateRequestEvent: cloudFormationCreateRequestEvent,
- apiGatewayProxyEvent: apiGatewayProxyEvent,
- cloudWatchLogsEvent: cloudWatchLogsEvent,
- kinesisDataFirehoseEvent: kinesisDataFirehoseEvent,
- albEvent: albEvent,
- albEventWithMultiValueParameters: albEventWithMultiValueParameters,
- cloudwatchScheduled: cloudwatchScheduled,
- sesEvent: sesEvent
+ kinesisDataStreamEvent,
+ s3PutEvent,
+ snsEvent,
+ dynamoDbUpdateEvent,
+ codeCommitEvent,
+ cloudFrontEvent,
+ cloudFormationCreateRequestEvent,
+ apiGatewayProxyEvent,
+ cloudWatchLogsEvent,
+ kinesisDataFirehoseEvent,
+ albEvent,
+ albEventWithMultiValueParameters,
+ cloudwatchScheduled,
+ sesEvent
}
diff --git a/test/unit/shim/message-shim.test.js b/test/unit/shim/message-shim.test.js
index 00ad948b9e..9e1120264c 100644
--- a/test/unit/shim/message-shim.test.js
+++ b/test/unit/shim/message-shim.test.js
@@ -32,7 +32,7 @@ test('MessageShim', async function (t) {
shim.setLibrary(shim.RABBITMQ)
ctx.nr.wrappable = {
name: 'this is a name',
- bar: function barsName(unused, params) { return 'bar' }, // eslint-disable-line
+ bar: function barsName(unused, params) { return 'bar' },
fiz: function fizsName() {
return 'fiz'
},
@@ -298,8 +298,8 @@ test('MessageShim', async function (t) {
const val = {}
const toWrap = function () {
segment = shim.getSegment()
- return new Promise(function (res) {
- setTimeout(res, DELAY, val)
+ return new Promise(function (resolve) {
+ setTimeout(resolve, DELAY, val)
})
}
@@ -787,8 +787,8 @@ test('MessageShim', async function (t) {
const wrapped = shim.recordPurgeQueue(function () {
segment = shim.getSegment()
- return new Promise(function (res) {
- setTimeout(res, DELAY, val)
+ return new Promise(function (resolve) {
+ setTimeout(resolve, DELAY, val)
})
}, new MessageSpec({ promise: true }))
diff --git a/test/unit/shim/shim.test.js b/test/unit/shim/shim.test.js
index 71238d557d..2d5bbf8171 100644
--- a/test/unit/shim/shim.test.js
+++ b/test/unit/shim/shim.test.js
@@ -29,7 +29,7 @@ test('Shim', async function (t) {
ctx.nr.shim = new Shim(agent, 'test-module')
ctx.nr.wrappable = {
name: 'this is a name',
- bar: function barsName(unused, params) { return 'bar' }, // eslint-disable-line
+ bar: function barsName(unused, params) { return 'bar' },
fiz: function fizsName() {
return 'fiz'
},
@@ -219,7 +219,7 @@ test('Shim', async function (t) {
await t.test('should match the arity and name of the original when specified', function (t) {
const { shim } = t.nr
- // eslint-disable-next-line no-unused-vars
+
function toWrap(a, b) {}
const wrapped = shim.wrap(toWrap, {
wrapper: function () {
@@ -233,7 +233,6 @@ test('Shim', async function (t) {
})
await t.test('should pass items in the `args` parameter to the spec', function (t, end) {
- /* eslint-disable max-params */
const { shim, wrappable } = t.nr
shim.wrap(
wrappable,
@@ -246,7 +245,6 @@ test('Shim', async function (t) {
},
['a', 'b', 'c']
)
- /* eslint-enable max-params */
})
await t.test('should wrap the first parameter', function (t, end) {
@@ -684,7 +682,7 @@ test('Shim', async function (t) {
await t.test('should pass items in the `args` parameter to the spec', function (t, end) {
const { shim, toWrap } = t.nr
- /* eslint-disable max-params */
+
shim.wrapReturn(
toWrap,
'foo',
@@ -697,7 +695,6 @@ test('Shim', async function (t) {
},
['a', 'b', 'c']
)
- /* eslint-enable max-params */
toWrap.foo()
})
@@ -793,7 +790,7 @@ test('Shim', async function (t) {
await t.test('should pass items in the `args` parameter to the spec', function (t) {
const { shim, toWrap } = t.nr
- /* eslint-disable max-params */
+
shim.wrapClass(
toWrap,
'Foo',
@@ -805,7 +802,6 @@ test('Shim', async function (t) {
},
['a', 'b', 'c']
)
- /* eslint-enable max-params */
const foo = new toWrap.Foo()
assert.ok(foo)
@@ -2064,7 +2060,7 @@ test('Shim', async function (t) {
await t.test('should work with an object and a string index', function (t) {
const { cb, shim } = t.nr
- const opts = { a: 'a', cb: cb, b: 'b' }
+ const opts = { a: 'a', cb, b: 'b' }
shim.bindCallbackSegment({}, opts, 'cb')
assert.equal(shim.isWrapped(opts, 'cb'), true)
})
@@ -2268,6 +2264,7 @@ test('Shim', async function (t) {
await t.test('should not throw in a transaction when `func` has no `.apply` method', (t) => {
const { segment, shim } = t.nr
const func = function () {}
+ // eslint-disable-next-line no-proto
func.__proto__ = {}
assert.ok(!func.apply)
assert.doesNotThrow(() => shim.applySegment(func, segment))
@@ -2276,6 +2273,7 @@ test('Shim', async function (t) {
await t.test('should not throw out of a transaction', (t) => {
const { shim } = t.nr
const func = function () {}
+ // eslint-disable-next-line no-proto
func.__proto__ = {}
assert.ok(!func.apply)
assert.doesNotThrow(() => shim.applySegment(func, null))
@@ -2566,6 +2564,7 @@ test('Shim', async function (t) {
await t.test('should detect if an item is a string', function (t) {
const { shim } = t.nr
assert.ok(shim.isString('foobar'))
+ // eslint-disable-next-line sonarjs/no-primitive-wrappers, no-new-wrappers
assert.ok(shim.isString(new String('foobar')))
assert.ok(!shim.isString({}))
assert.ok(!shim.isString([]))
@@ -3089,7 +3088,7 @@ test('Shim', async function (t) {
await t.test('should properly resolve _moduleRoot as windows path', (t) => {
const { agent } = t.nr
- const root = `c:\\path\\to\\app\\node_modules\\@scope\\test`
+ const root = 'c:\\path\\to\\app\\node_modules\\@scope\\test'
const shim = new Shim(agent, '@scope/test', root)
assert.equal(shim._moduleRoot, root)
})
diff --git a/test/unit/shim/transaction-shim.test.js b/test/unit/shim/transaction-shim.test.js
index 06766ff21e..66cabc5d61 100644
--- a/test/unit/shim/transaction-shim.test.js
+++ b/test/unit/shim/transaction-shim.test.js
@@ -58,7 +58,7 @@ test('TransactionShim', async function (t) {
ctx.nr.shim = new TransactionShim(agent, 'test-module')
ctx.nr.wrappable = {
name: 'this is a name',
- bar: function barsName(unused, params) { return 'bar' }, // eslint-disable-line
+ bar: function barsName(unused, params) { return 'bar' },
fiz: function fizsName() {
return 'fiz'
},
diff --git a/test/unit/shim/webframework-shim.test.js b/test/unit/shim/webframework-shim.test.js
index ec1fe6f1ee..39163be032 100644
--- a/test/unit/shim/webframework-shim.test.js
+++ b/test/unit/shim/webframework-shim.test.js
@@ -47,7 +47,7 @@ function createMiddleware({ ctx, path }) {
} else {
resolve()
}
- } catch (e) {
+ } catch (err) {
reject(err)
}
}, 20)
@@ -63,7 +63,7 @@ test('WebFrameworkShim', async function (t) {
shim.setFramework(WebFrameworkShim.RESTIFY)
ctx.nr.wrappable = {
name: 'this is a name',
- bar: function barsName(unused, params) { return 'bar' }, // eslint-disable-line
+ bar: function barsName(unused, params) { return 'bar' },
fiz: function fizsName() {
return 'fiz'
},
@@ -525,7 +525,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordMiddleware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
route: '/foo/bar'
})
)
@@ -558,7 +558,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordMiddleware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
route: '/foo/bar'
})
)
@@ -585,7 +585,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordMiddleware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
route: ''
})
)
@@ -612,7 +612,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordMiddleware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
route: ['/one', '/two']
})
)
@@ -633,7 +633,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordMiddleware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
route: ''
})
)
@@ -763,7 +763,7 @@ test('WebFrameworkShim', async function (t) {
await t.test('should pop the namestate if there was no error', function (t, end) {
const { agent, req, shim, txInfo } = t.nr
const wrapped = shim.recordMiddleware(function () {},
- new MiddlewareSpec({ route: '/foo/bar' }))
+ new MiddlewareSpec({ route: '/foo/bar' }))
helper.runInTransaction(agent, function (tx) {
tx.nameState.appendPath('/')
@@ -1021,7 +1021,7 @@ test('WebFrameworkShim', async function (t) {
assert.equal(tx.nameState.getPath(), '/')
return new Promise(function (resolve, reject) {
assert.equal(agent.tracer.getTransaction(), tx)
- reject()
+ reject(Error('boom'))
})
})
})
@@ -1155,7 +1155,7 @@ test('WebFrameworkShim', async function (t) {
const wrapped = shim.recordParamware(
wrappable.getActiveSegment,
new MiddlewareSpec({
- type: type,
+ type,
name: 'foo'
})
)
diff --git a/test/unit/shimmer.test.js b/test/unit/shimmer.test.js
index ec1600628b..6892039777 100644
--- a/test/unit/shimmer.test.js
+++ b/test/unit/shimmer.test.js
@@ -30,7 +30,7 @@ async function makeModuleTests({ moduleName, relativePath, throwsError }, t) {
ctx.nr.errorThrown = 0
ctx.nr.agent = helper.instrumentMockedAgent()
const instrumentationOpts = {
- moduleName: moduleName,
+ moduleName,
onRequire: function (shim, module) {
ctx.nr.instrumentedModule = module
++ctx.nr.counter
@@ -686,7 +686,7 @@ test('Should not register when no hooks provided', async (t) => {
const moduleName = 'test name'
shimmer.registerInstrumentation({
- moduleName: moduleName
+ moduleName
})
assert.ok(!shimmer.registeredInstrumentations[moduleName])
@@ -832,7 +832,7 @@ test('Shimmer with logger mock', async (t) => {
assert.deepEqual(loggerMock.debug.args[0], [
'Failed to get version for `%s`, reason: %s',
'bogus',
- `no tracked items for module 'bogus'`
+ "no tracked items for module 'bogus'"
])
}
)
@@ -844,7 +844,7 @@ function clearCachedModules(modules) {
const requirePath = require.resolve(moduleName)
delete require.cache[requirePath]
return true
- } catch (e) {
+ } catch {
return false
}
})
diff --git a/test/unit/spans/batch-span-streamer.test.js b/test/unit/spans/batch-span-streamer.test.js
index cfba67815f..dd4ac0f63a 100644
--- a/test/unit/spans/batch-span-streamer.test.js
+++ b/test/unit/spans/batch-span-streamer.test.js
@@ -84,7 +84,8 @@ test('BatchSpanStreamer', async (t) => {
assert.equal(spanStreamer.spans.length, 1, 'one span queued')
/* emit drain event and allow writes */
- spanStreamer.stream.emit('drain', (fakeConnection.stream.write = () => true))
+ fakeConnection.stream.write = () => true
+ spanStreamer.stream.emit('drain', fakeConnection.stream.write)
assert.equal(spanStreamer.spans.length, 0, 'drained spans')
assert.equal(
@@ -118,15 +119,13 @@ test('BatchSpanStreamer', async (t) => {
assert.equal(spanStreamer.spans.length, 1, 'one span queued')
// emit drain event, allow writes and check for span.trace_id
- fakeConnection.stream.emit(
- 'drain',
- (fakeConnection.stream.write = ({ spans }) => {
- const [span] = spans
- assert.equal(span.trace_id, 'porridge', 'Should have formatted span')
-
- return true
- })
- )
+ fakeConnection.stream.write = ({ spans }) => {
+ const [span] = spans
+ assert.equal(span.trace_id, 'porridge', 'Should have formatted span')
+
+ return true
+ }
+ fakeConnection.stream.emit('drain', fakeConnection.stream.write)
assert.equal(spanStreamer.spans.length, 0, 'drained spans')
assert.equal(
diff --git a/test/unit/spans/map-to-streaming-type.test.js b/test/unit/spans/map-to-streaming-type.test.js
index d96ec4f7fc..1f15568626 100644
--- a/test/unit/spans/map-to-streaming-type.test.js
+++ b/test/unit/spans/map-to-streaming-type.test.js
@@ -46,7 +46,7 @@ test('should correctly convert bools when false', async () => {
})
test('should correctly convert integers', async () => {
- const intValue = 9999999999999999
+ const intValue = 999999999999999
const expected = {
int_value: intValue
}
diff --git a/test/unit/spans/span-event-aggregator.test.js b/test/unit/spans/span-event-aggregator.test.js
index 6d5da42061..1055e90de4 100644
--- a/test/unit/spans/span-event-aggregator.test.js
+++ b/test/unit/spans/span-event-aggregator.test.js
@@ -252,11 +252,11 @@ test('SpanAggregator', async (t) => {
assert.equal(
spanEventAggregator.periodMs,
4000,
- `should use span_event_harvest_config.report_period_ms`
+ 'should use span_event_harvest_config.report_period_ms'
)
})
- await t.test(`should use 'span_event_harvest_config.harvest_limit' from server`, (t) => {
+ await t.test("should use 'span_event_harvest_config.harvest_limit' from server", (t) => {
const { spanEventAggregator } = t.nr
const fakeConfig = {
span_event_harvest_config: {
@@ -273,10 +273,10 @@ test('SpanAggregator', async (t) => {
2000,
'should use span_event_harvest_config.harvest_limit'
)
- assert.equal(spanEventAggregator._items.limit, 2000, `should set queue limit`)
+ assert.equal(spanEventAggregator._items.limit, 2000, 'should set queue limit')
})
- await t.test(`should use 'span_event_harvest_config.harvest_limit' from server`, (t) => {
+ await t.test("should use 'span_event_harvest_config.harvest_limit' from server", (t) => {
const { spanEventAggregator } = t.nr
const fakeConfig = {
span_event_harvest_config: {
@@ -293,7 +293,7 @@ test('SpanAggregator', async (t) => {
2000,
'should use span_event_harvest_config.harvest_limit'
)
- assert.equal(spanEventAggregator._items.limit, 2000, `should set queue limit`)
+ assert.equal(spanEventAggregator._items.limit, 2000, 'should set queue limit')
})
await t.test('should use max_samples_stored as-is when no span harvest config', (t) => {
diff --git a/test/unit/spans/span-streamer-helpers.js b/test/unit/spans/span-streamer-helpers.js
index d4d6a3b98f..a5b267e010 100644
--- a/test/unit/spans/span-streamer-helpers.js
+++ b/test/unit/spans/span-streamer-helpers.js
@@ -65,8 +65,8 @@ helpers.createMetricAggregator = function createMetricAggregator() {
{
// runId: RUN_ID,
apdexT: 0.5,
- mapper: mapper,
- normalizer: normalizer
+ mapper,
+ normalizer
},
{},
{ add() {} }
diff --git a/test/unit/spans/span-streamer.test.js b/test/unit/spans/span-streamer.test.js
index 48451f7a27..5124712c75 100644
--- a/test/unit/spans/span-streamer.test.js
+++ b/test/unit/spans/span-streamer.test.js
@@ -72,7 +72,8 @@ test('SpanStreamer', async (t) => {
assert.equal(spanStreamer.spans.length, 1, 'one span queued')
/* emit drain event and allow writes */
- fakeConnection.stream.emit('drain', (fakeConnection.stream.write = () => true))
+ fakeConnection.stream.write = () => true
+ fakeConnection.stream.emit('drain', fakeConnection.stream.write)
assert.equal(spanStreamer.spans.length, 0, 'drained spans')
assert.equal(
@@ -105,13 +106,14 @@ test('SpanStreamer', async (t) => {
assert.equal(spanStreamer.spans.length, 1, 'one span queued')
/* emit drain event, allow writes and check for span.trace_id */
+ fakeConnection.stream.write = (span) => {
+ assert.equal(span.trace_id, 'porridge', 'Should have formatted span')
+
+ return true
+ }
fakeConnection.stream.emit(
'drain',
- (fakeConnection.stream.write = (span) => {
- assert.equal(span.trace_id, 'porridge', 'Should have formatted span')
-
- return true
- })
+ fakeConnection.stream.write
)
assert.equal(spanStreamer.spans.length, 0, 'drained spans')
diff --git a/test/unit/system-info.test.js b/test/unit/system-info.test.js
index 242f6c2f05..5672230159 100644
--- a/test/unit/system-info.test.js
+++ b/test/unit/system-info.test.js
@@ -158,7 +158,7 @@ test('getProcessorStats - linux', async (t) => {
'./utilization/common': {
readProc: readProcFunction
},
- 'os': {
+ os: {
platform: platformFunction
}
})
@@ -302,7 +302,7 @@ test('getMemoryStats - linux', async (t) => {
'./utilization/common': {
readProc: readProcFunction
},
- 'os': {
+ os: {
platform: platformFunction
}
})
@@ -393,7 +393,7 @@ test('systemInfo edge cases', async (t) => {
'./utilization/docker-info': {
getBootId: (agent, callback) => callback(null)
},
- 'os': {
+ os: {
platform: () => 'something weird'
}
})
@@ -407,7 +407,7 @@ test('systemInfo edge cases', async (t) => {
return new Promise((resolve) => {
systemInfo._getProcessorStats = () => {}
- systemInfo(agentMock, (err, result) => {
+ systemInfo(agentMock, (_, result) => {
resolve(result)
})
})
diff --git a/test/unit/transaction.test.js b/test/unit/transaction.test.js
index 8a643a45d0..562d3485cd 100644
--- a/test/unit/transaction.test.js
+++ b/test/unit/transaction.test.js
@@ -694,7 +694,7 @@ test('Transaction methods', async (t) => {
initiator: 'cli',
attributes: {
'Attr-Test': 'value',
- 'attr2Test': 'value1',
+ attr2Test: 'value1',
'xTest-Header': 'value2'
}
}
diff --git a/test/unit/transaction/trace/index.test.js b/test/unit/transaction/trace/index.test.js
index b3641a0cd9..a6082140e5 100644
--- a/test/unit/transaction/trace/index.test.js
+++ b/test/unit/transaction/trace/index.test.js
@@ -356,7 +356,7 @@ test('when serializing synchronously', async (t) => {
const { details } = t.nr
const json = details.trace.generateJSONSync()
- assert.match(json[4], /^[a-zA-Z0-9\+\/]+={0,2}$/, 'should be base64 encoded')
+ assert.match(json[4], /^[a-zA-Z0-9+/]+={0,2}$/, 'should be base64 encoded')
const data = await codecDecodeAsync(json[4])
assert.deepEqual(data, details.rootNode)
@@ -439,7 +439,7 @@ test('when serializing asynchronously', async (t) => {
async (t) => {
const { details } = t.nr
const json = await details.trace.generateJSONAsync()
- assert.match(json[4], /^[a-zA-Z0-9\+\/]+={0,2}$/, 'should be base64 encoded')
+ assert.match(json[4], /^[a-zA-Z0-9+/]+={0,2}$/, 'should be base64 encoded')
const data = await codecDecodeAsync(json[4])
assert.deepEqual(data, details.rootNode)
@@ -987,7 +987,7 @@ async function makeTrace(agent) {
{
'request.uri': '/test?test=value',
'request.parameters.test': 'value',
- 'nr_exclusive_duration_millis': 8
+ nr_exclusive_duration_millis: 8
},
[
// TODO: ensure that the ordering is correct WRT start time
diff --git a/test/unit/transaction/trace/segment.test.js b/test/unit/transaction/trace/segment.test.js
index a56a4444a6..33aa97ead0 100644
--- a/test/unit/transaction/trace/segment.test.js
+++ b/test/unit/transaction/trace/segment.test.js
@@ -58,7 +58,8 @@ test('TraceSegment', async (t) => {
const { agent } = t.nr
const trans = new Transaction(agent)
assert.doesNotThrow(function noCallback() {
- new TraceSegment(trans, 'UnitTest') // eslint-disable-line no-new
+ // eslint-disable-next-line no-new
+ new TraceSegment(trans, 'UnitTest')
})
const working = new TraceSegment(trans, 'UnitTest', function () {
end()
@@ -171,7 +172,8 @@ test('TraceSegment', async (t) => {
assert.equal(tx.numSegments, 1)
assert.equal(agent.activeTransactions, 1)
- const segment = new TraceSegment(tx, 'Test') // eslint-disable-line no-unused-vars
+ // eslint-disable-next-line sonarjs/no-unused-vars, sonarjs/no-dead-store, no-unused-vars
+ const segment = new TraceSegment(tx, 'Test')
assert.equal(agent.totalActiveSegments, 2)
assert.equal(agent.segmentsCreatedInHarvest, 2)
assert.equal(tx.numSegments, 2)
@@ -328,7 +330,7 @@ test('with children created from URLs', async (t) => {
1,
'WebTransaction/NormalizedUri/*',
{
- 'nr_exclusive_duration_millis': 1,
+ nr_exclusive_duration_millis: 1,
'request.parameters.test1': 'value1',
'request.parameters.test2': true,
'request.parameters.test3': '50',
@@ -516,7 +518,7 @@ test('with attributes.enabled set', async (t) => {
1,
'WebTransaction/NormalizedUri/*',
{
- 'nr_exclusive_duration_millis': 1,
+ nr_exclusive_duration_millis: 1,
'request.parameters.test2': true,
'request.parameters.test3': '50'
},
diff --git a/test/unit/transaction/trace/trace-aggregator.test.js b/test/unit/transaction/trace/trace-aggregator.test.js
index d3e9aefaaa..c07159cc01 100644
--- a/test/unit/transaction/trace/trace-aggregator.test.js
+++ b/test/unit/transaction/trace/trace-aggregator.test.js
@@ -247,7 +247,7 @@ test('TraceAggregator', async function (t) {
assert.ok(!err)
// This 6th transaction should not be collected.
assert.ok(!agent.traces.trace)
- createTransaction(agent, `/test-0`, 500)
+ createTransaction(agent, '/test-0', 500)
assert.ok(!agent.traces.trace, '6th trace to collect')
end()
}
diff --git a/test/unit/util/async-each-limit.test.js b/test/unit/util/async-each-limit.test.js
index 884b140b77..c1f623f153 100644
--- a/test/unit/util/async-each-limit.test.js
+++ b/test/unit/util/async-each-limit.test.js
@@ -40,7 +40,7 @@ test('eachLimit should limit concurrent async executions', async () => {
try {
await access(file)
return true
- } catch (err) {
+ } catch {
return false
}
}
diff --git a/test/unit/util/code-level-metrics.test.js b/test/unit/util/code-level-metrics.test.js
index f6c7c86ad0..652976796f 100644
--- a/test/unit/util/code-level-metrics.test.js
+++ b/test/unit/util/code-level-metrics.test.js
@@ -9,7 +9,7 @@ const test = require('node:test')
const { addCLMAttributes } = require('../../../lib/util/code-level-metrics')
const { anon, arrow, named } = require('../../lib/clm-helper')
const path = require('path')
-const helperPath = path.resolve(`${__dirname}/../../lib/clm-helper.js`)
+const helperPath = path.resolve(path.join(__dirname, '/../../lib/clm-helper.js'))
const sinon = require('sinon')
const symbols = require('../../../lib/symbols')
const { assertExactClmAttrs } = require('../../lib/custom-assertions')
diff --git a/test/unit/util/deep-equal.test.js b/test/unit/util/deep-equal.test.js
index 5d45994a72..929cc4400b 100644
--- a/test/unit/util/deep-equal.test.js
+++ b/test/unit/util/deep-equal.test.js
@@ -10,7 +10,8 @@ const assert = require('node:assert')
const deepEqual = require('../../../lib/util/deep-equal')
function functionA(a) {
- return a++
+ a = a + 1
+ return a
}
test('deepEqual handles all the edge cases', async function (t) {
@@ -56,11 +57,9 @@ test('deepEqual handles all the edge cases', async function (t) {
assert.ok(deepEqual([0, 1], [0, 1]), 'arrays check out')
- // eslint-disable-next-line sonarjs/prefer-object-literal -- Disabled so we can create cyclical objects
const cyclicA = {}
cyclicA.x = cyclicA
- // eslint-disable-next-line sonarjs/prefer-object-literal -- Disabled so we can create cyclical objects
const cyclicB = {}
cyclicB.x = cyclicB
diff --git a/test/unit/util/flatten.test.js b/test/unit/util/flatten.test.js
index 1d345f6a22..ffd2d8d06d 100644
--- a/test/unit/util/flatten.test.js
+++ b/test/unit/util/flatten.test.js
@@ -15,18 +15,17 @@ test('util.flatten', async (t) => {
assert.deepStrictEqual(flatten({}, '', { a: 5, b: true }), { a: 5, b: true }, '1 level')
assert.deepStrictEqual(
flatten({}, '', { a: 5, b: { c: true, d: 7 } }),
- { 'a': 5, 'b.c': true, 'b.d': 7 },
+ { a: 5, 'b.c': true, 'b.d': 7 },
'2 levels'
)
assert.deepStrictEqual(
flatten({}, '', { a: 5, b: { c: true, d: 7, e: { foo: 'efoo', bar: 'ebar' } } }),
- { 'a': 5, 'b.c': true, 'b.d': 7, 'b.e.foo': 'efoo', 'b.e.bar': 'ebar' },
+ { a: 5, 'b.c': true, 'b.d': 7, 'b.e.foo': 'efoo', 'b.e.bar': 'ebar' },
'3 levels'
)
})
await t.test('flattens recursive objects', () => {
- // eslint-disable-next-line sonarjs/prefer-object-literal -- Disabled so we can create cyclical objects
const obj = {}
obj.x = obj
assert.deepStrictEqual(flatten({}, '', obj), {})
@@ -49,7 +48,6 @@ test('util.flatten.keys', async (t) => {
})
await t.test('flattens recursive objects', () => {
- // eslint-disable-next-line sonarjs/prefer-object-literal -- Disabled so we can create cyclical objects
const obj = {}
obj.x = obj
assert.deepStrictEqual(flatten.keys(obj), [])
diff --git a/test/unit/util/hashes.test.js b/test/unit/util/hashes.test.js
index 79f59590be..d3db0498ca 100644
--- a/test/unit/util/hashes.test.js
+++ b/test/unit/util/hashes.test.js
@@ -61,7 +61,6 @@ test('getHash', { skip: major > 18 }, async (t) => {
* When fully disabled, this test can likely be removed.
* https://nodejs.org/api/deprecations.html#DEP0091
*/
- /* eslint-disable node/no-deprecated-api */
await t.test('should not crash when changing the DEFAULT_ENCODING key on crypto', () => {
const crypto = require('node:crypto')
const oldEncoding = crypto.DEFAULT_ENCODING
diff --git a/test/unit/util/llm-utils.test.js b/test/unit/util/llm-utils.test.js
index 624eaa47ea..e05ae51d9b 100644
--- a/test/unit/util/llm-utils.test.js
+++ b/test/unit/util/llm-utils.test.js
@@ -11,7 +11,7 @@ const { AsyncLocalStorage } = require('async_hooks')
test('extractLlmAttributes', () => {
const context = {
- 'skip': 1,
+ skip: 1,
'llm.get': 2,
'fllm.skip': 3
}
@@ -50,7 +50,7 @@ test('extractLlmContext', async (t) => {
await t.test('extract LLM context', (t, end) => {
const { tx, agent } = t.nr
- tx._llmContextManager.run({ 'llm.test': 1, 'skip': 2 }, () => {
+ tx._llmContextManager.run({ 'llm.test': 1, skip: 2 }, () => {
const llmContext = extractLlmContext(agent)
assert.equal(llmContext['llm.test'], 1)
assert.ok(!llmContext.skip)
diff --git a/test/unit/utilization/ecs-info.test.js b/test/unit/utilization/ecs-info.test.js
index e545626a51..fb579664c6 100644
--- a/test/unit/utilization/ecs-info.test.js
+++ b/test/unit/utilization/ecs-info.test.js
@@ -19,12 +19,12 @@ async function getServer() {
switch (req.url) {
case '/json-error': {
- res.end(`{"invalid":"json"`)
+ res.end('{"invalid":"json"')
break
}
case '/no-id': {
- res.end(`{}`)
+ res.end('{}')
break
}
diff --git a/test/versioned/amqplib/amqp-utils.js b/test/versioned/amqplib/amqp-utils.js
index f9cc6228ce..121697fdd0 100644
--- a/test/versioned/amqplib/amqp-utils.js
+++ b/test/versioned/amqplib/amqp-utils.js
@@ -294,14 +294,14 @@ function getChannel(amqplib, cb) {
conn.createChannel(function (err, channel) {
cb(err, {
connection: conn,
- channel: channel
+ channel
})
})
})
} else {
return amqplib.connect(CON_STRING).then(function (conn) {
return conn.createChannel().then(function (channel) {
- return { connection: conn, channel: channel }
+ return { connection: conn, channel }
})
})
}
diff --git a/test/versioned/aws-sdk-v2/amazon-dax-client.test.js b/test/versioned/aws-sdk-v2/amazon-dax-client.test.js
index 6070538c7f..6702f44e9c 100644
--- a/test/versioned/aws-sdk-v2/amazon-dax-client.test.js
+++ b/test/versioned/aws-sdk-v2/amazon-dax-client.test.js
@@ -77,8 +77,8 @@ test('amazon-dax-client', async (t) => {
port_path_or_id: 'unknown',
collection: 'TableDoesNotExist',
product: 'DynamoDB'
- }),
- end()
+ })
+ end()
})
})
})
diff --git a/test/versioned/aws-sdk-v2/aws-sdk.test.js b/test/versioned/aws-sdk-v2/aws-sdk.test.js
index 517c63bf7f..90ae2e7be8 100644
--- a/test/versioned/aws-sdk-v2/aws-sdk.test.js
+++ b/test/versioned/aws-sdk-v2/aws-sdk.test.js
@@ -51,7 +51,7 @@ test('aws-sdk', async (t) => {
const s3 = new AWS.S3({
apiVersion: '2006-03-01',
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint,
+ endpoint,
// allows using generic endpoint, instead of needing a
// bucket.endpoint server setup.
s3ForcePathStyle: true,
diff --git a/test/versioned/aws-sdk-v2/dynamodb.test.js b/test/versioned/aws-sdk-v2/dynamodb.test.js
index 1a27862846..f9a06c1e1e 100644
--- a/test/versioned/aws-sdk-v2/dynamodb.test.js
+++ b/test/versioned/aws-sdk-v2/dynamodb.test.js
@@ -109,10 +109,10 @@ function finish(end, tests, tx) {
const attrs = segment.attributes.get(common.SEGMENT_DESTINATION)
attrs.port_path_or_id = parseInt(attrs.port_path_or_id, 10)
match(attrs, {
- 'host': String,
- 'port_path_or_id': Number,
- 'product': 'DynamoDB',
- 'collection': String,
+ host: String,
+ port_path_or_id: Number,
+ product: 'DynamoDB',
+ collection: String,
'aws.operation': operation,
'aws.requestId': String,
'aws.region': 'us-east-1',
diff --git a/test/versioned/aws-sdk-v2/http-services.test.js b/test/versioned/aws-sdk-v2/http-services.test.js
index ecf9e525df..fef4215c6d 100644
--- a/test/versioned/aws-sdk-v2/http-services.test.js
+++ b/test/versioned/aws-sdk-v2/http-services.test.js
@@ -70,7 +70,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.ELB({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.addTags(
{
@@ -99,7 +99,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.ElastiCache({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.addTagsToResource(
{
@@ -125,7 +125,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.Lambda({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.addLayerVersionPermission(
{
@@ -150,7 +150,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.RDS({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.addRoleToDBCluster(
{
@@ -170,7 +170,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.Redshift({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.acceptReservedNodeExchange(
{
@@ -190,7 +190,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.Rekognition({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.compareFaces(
{
@@ -221,7 +221,7 @@ test('AWS HTTP Services', async (t) => {
helper.runInTransaction(agent, (tx) => {
const service = new AWS.SES({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint
+ endpoint
})
service.cloneReceiptRuleSet(
{
diff --git a/test/versioned/aws-sdk-v2/sns.test.js b/test/versioned/aws-sdk-v2/sns.test.js
index 372cb8e71d..5122db0c84 100644
--- a/test/versioned/aws-sdk-v2/sns.test.js
+++ b/test/versioned/aws-sdk-v2/sns.test.js
@@ -84,6 +84,6 @@ function finish(end, tx) {
'aws.requestId': String,
'aws.service': 'Amazon SNS',
'aws.region': 'us-east-1'
- }),
- end()
+ })
+ end()
}
diff --git a/test/versioned/aws-sdk-v2/sqs.test.js b/test/versioned/aws-sdk-v2/sqs.test.js
index 4b97825b85..db711a4db9 100644
--- a/test/versioned/aws-sdk-v2/sqs.test.js
+++ b/test/versioned/aws-sdk-v2/sqs.test.js
@@ -28,7 +28,7 @@ test('SQS API', async (t) => {
const endpoint = `http://localhost:${server.address().port}`
ctx.nr.sqs = new AWS.SQS({
credentials: FAKE_CREDENTIALS,
- endpoint: endpoint,
+ endpoint,
apiVersion: '2012-11-05',
region: AWS_REGION
})
diff --git a/test/versioned/aws-sdk-v3/bedrock-chat-completions.test.js b/test/versioned/aws-sdk-v3/bedrock-chat-completions.test.js
index ffc90e570e..119189d0a9 100644
--- a/test/versioned/aws-sdk-v3/bedrock-chat-completions.test.js
+++ b/test/versioned/aws-sdk-v3/bedrock-chat-completions.test.js
@@ -15,6 +15,10 @@ const { assertSegments, match } = require('../../lib/custom-assertions')
const promiseResolvers = require('../../lib/promise-resolvers')
const { tspl } = require('@matteo.collina/tspl')
+function consumeStreamChunk() {
+ // A no-op function used to consume chunks of a stream.
+}
+
const requests = {
ai21: (prompt, modelId) => ({
body: JSON.stringify({ prompt, temperature: 0.5, maxTokens: 100 }),
@@ -186,7 +190,7 @@ test.afterEach(afterEach)
const response = await client.send(command)
for await (const event of response.body) {
// no-op iteration over the stream in order to exercise the instrumentation
- event
+ consumeStreamChunk(event)
}
const events = agent.customEventAggregator.events.toArray()
@@ -230,7 +234,7 @@ test.afterEach(afterEach)
const [[, feedback]] = recordedEvents.filter(([{ type }]) => type === 'LlmFeedbackMessage')
match(feedback, {
- id: /[\w\d]{32}/,
+ id: /\w{32}/,
trace_id: traceId,
category: 'test-event',
rating: '5 star',
@@ -289,10 +293,10 @@ test.afterEach(afterEach)
'http.statusCode': 400,
'error.message': expectedMsg,
'error.code': expectedType,
- 'completion_id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/
+ completion_id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/
},
agentAttributes: {
- spanId: /[\w\d]+/
+ spanId: /\w+/
}
})
@@ -364,9 +368,9 @@ test.afterEach(afterEach)
})
})
-test(`cohere embedding streaming works`, async (t) => {
+test('cohere embedding streaming works', async (t) => {
const { bedrock, client, agent } = t.nr
- const prompt = `embed text cohere stream`
+ const prompt = 'embed text cohere stream'
const input = {
body: JSON.stringify({
texts: prompt.split(' '),
@@ -383,7 +387,7 @@ test(`cohere embedding streaming works`, async (t) => {
const response = await client.send(command)
for await (const event of response.body) {
// no-op iteration over the stream in order to exercise the instrumentation
- event
+ consumeStreamChunk(event)
}
const events = agent.customEventAggregator.events.toArray()
@@ -396,10 +400,10 @@ test(`cohere embedding streaming works`, async (t) => {
})
})
-test(`ai21: should properly create errors on create completion (streamed)`, async (t) => {
+test('ai21: should properly create errors on create completion (streamed)', async (t) => {
const { bedrock, client, agent, expectedExternalPath } = t.nr
const modelId = 'ai21.j2-mid-v1'
- const prompt = `text ai21 ultimate question error streamed`
+ const prompt = 'text ai21 ultimate question error streamed'
const input = requests.ai21(prompt, modelId)
const command = new bedrock.InvokeModelWithResponseStreamCommand(input)
@@ -426,10 +430,10 @@ test(`ai21: should properly create errors on create completion (streamed)`, asyn
'http.statusCode': 400,
'error.message': expectedMsg,
'error.code': expectedType,
- 'completion_id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/
+ completion_id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/
},
agentAttributes: {
- spanId: /[\w\d]+/
+ spanId: /\w+/
}
})
@@ -459,10 +463,10 @@ test(`ai21: should properly create errors on create completion (streamed)`, asyn
})
})
-test(`models that do not support streaming should be handled`, async (t) => {
+test('models that do not support streaming should be handled', async (t) => {
const { bedrock, client, agent, expectedExternalPath } = t.nr
const modelId = 'amazon.titan-embed-text-v1'
- const prompt = `embed text amazon error streamed`
+ const prompt = 'embed text amazon error streamed'
const input = requests.amazon(prompt, modelId)
const command = new bedrock.InvokeModelWithResponseStreamCommand(input)
@@ -489,10 +493,10 @@ test(`models that do not support streaming should be handled`, async (t) => {
'http.statusCode': 400,
'error.message': expectedMsg,
'error.code': expectedType,
- 'completion_id': undefined
+ completion_id: undefined
},
agentAttributes: {
- spanId: /[\w\d]+/
+ spanId: /\w+/
}
})
@@ -514,10 +518,10 @@ test(`models that do not support streaming should be handled`, async (t) => {
})
})
-test(`models should properly create errors on stream interruption`, async (t) => {
+test('models should properly create errors on stream interruption', async (t) => {
const { bedrock, client, agent } = t.nr
const modelId = 'amazon.titan-text-express-v1'
- const prompt = `text amazon bad stream`
+ const prompt = 'text amazon bad stream'
const input = requests.amazon(prompt, modelId)
const command = new bedrock.InvokeModelWithResponseStreamCommand(input)
@@ -548,7 +552,7 @@ test('should not instrument stream when disabled', async (t) => {
const modelId = 'amazon.titan-text-express-v1'
const { bedrock, client, agent } = t.nr
agent.config.ai_monitoring.streaming.enabled = false
- const prompt = `text amazon ultimate question streamed`
+ const prompt = 'text amazon ultimate question streamed'
const input = requests.amazon(prompt, modelId)
const command = new bedrock.InvokeModelWithResponseStreamCommand(input)
@@ -571,11 +575,11 @@ test('should not instrument stream when disabled', async (t) => {
assert.deepEqual(
chunk,
{
- 'outputText': '42',
- 'index': 0,
- 'totalOutputTextTokenCount': 75,
- 'completionReason': 'endoftext',
- 'inputTextTokenCount': 13,
+ outputText: '42',
+ index: 0,
+ totalOutputTextTokenCount: 75,
+ completionReason: 'endoftext',
+ inputTextTokenCount: 13,
'amazon-bedrock-invocationMetrics': {
inputTokenCount: 8,
outputTokenCount: 4,
@@ -596,7 +600,7 @@ test('should not instrument stream when disabled', async (t) => {
})
assert.equal(metrics.callCount > 0, true, 'should set framework metric')
const supportabilityMetrics = agent.metrics.getOrCreateMetric(
- `Supportability/Nodejs/ML/Streaming/Disabled`
+ 'Supportability/Nodejs/ML/Streaming/Disabled'
)
assert.equal(
supportabilityMetrics.callCount > 0,
diff --git a/test/versioned/aws-sdk-v3/bedrock-embeddings.test.js b/test/versioned/aws-sdk-v3/bedrock-embeddings.test.js
index 830d750aab..99c96ab7e0 100644
--- a/test/versioned/aws-sdk-v3/bedrock-embeddings.test.js
+++ b/test/versioned/aws-sdk-v3/bedrock-embeddings.test.js
@@ -88,18 +88,18 @@ test.afterEach(afterEach)
assert.equal(events.length, 1)
const embedding = events.filter(([{ type }]) => type === 'LlmEmbedding')[0]
const expectedEmbedding = {
- 'id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/,
- 'appName': 'New Relic for Node.js tests',
- 'request_id': '743dd35b-744b-4ddf-b5c6-c0f3de2e3142',
- 'trace_id': tx.traceId,
- 'span_id': tx.trace.root.children[0].id,
+ id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/,
+ appName: 'New Relic for Node.js tests',
+ request_id: '743dd35b-744b-4ddf-b5c6-c0f3de2e3142',
+ trace_id: tx.traceId,
+ span_id: tx.trace.root.children[0].id,
'response.model': modelId,
- 'vendor': 'bedrock',
- 'ingest_source': 'Node',
+ vendor: 'bedrock',
+ ingest_source: 'Node',
'request.model': modelId,
- 'duration': tx.trace.root.children[0].getDurationInMillis(),
- 'input': prompt,
- 'error': false
+ duration: tx.trace.root.children[0].getDurationInMillis(),
+ input: prompt,
+ error: false
}
assert.equal(embedding[0].type, 'LlmEmbedding')
@@ -149,10 +149,10 @@ test.afterEach(afterEach)
'http.statusCode': 400,
'error.message': expectedMsg,
'error.code': expectedType,
- 'embedding_id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/
+ embedding_id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/
},
agentAttributes: {
- spanId: /[\w\d]+/
+ spanId: /\w+/
}
})
@@ -165,18 +165,18 @@ test.afterEach(afterEach)
assert.equal(events.length, 1)
const embedding = events.filter(([{ type }]) => type === 'LlmEmbedding')[0]
const expectedEmbedding = {
- 'id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/,
- 'appName': 'New Relic for Node.js tests',
- 'request_id': '743dd35b-744b-4ddf-b5c6-c0f3de2e3142',
- 'trace_id': tx.traceId,
- 'span_id': tx.trace.root.children[0].id,
+ id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/,
+ appName: 'New Relic for Node.js tests',
+ request_id: '743dd35b-744b-4ddf-b5c6-c0f3de2e3142',
+ trace_id: tx.traceId,
+ span_id: tx.trace.root.children[0].id,
'response.model': modelId,
- 'vendor': 'bedrock',
- 'ingest_source': 'Node',
+ vendor: 'bedrock',
+ ingest_source: 'Node',
'request.model': modelId,
- 'duration': tx.trace.root.children[0].getDurationInMillis(),
- 'input': prompt,
- 'error': true
+ duration: tx.trace.root.children[0].getDurationInMillis(),
+ input: prompt,
+ error: true
}
assert.equal(embedding[0].type, 'LlmEmbedding')
diff --git a/test/versioned/aws-sdk-v3/client-dynamodb.test.js b/test/versioned/aws-sdk-v3/client-dynamodb.test.js
index c6ead32433..587f9bb619 100644
--- a/test/versioned/aws-sdk-v3/client-dynamodb.test.js
+++ b/test/versioned/aws-sdk-v3/client-dynamodb.test.js
@@ -210,10 +210,10 @@ function finish({ commands, tx, setDatastoreSpy }) {
const accountId = tx.agent.config.cloud.aws.account_id
match(attrs, {
- 'host': String,
- 'port_path_or_id': Number,
- 'product': 'DynamoDB',
- 'collection': String,
+ host: String,
+ port_path_or_id: Number,
+ product: 'DynamoDB',
+ collection: String,
'aws.operation': command.constructor.name,
'aws.requestId': String,
'aws.region': 'us-east-1',
diff --git a/test/versioned/aws-sdk-v3/common.js b/test/versioned/aws-sdk-v3/common.js
index 0a58c1a816..92259c8193 100644
--- a/test/versioned/aws-sdk-v3/common.js
+++ b/test/versioned/aws-sdk-v3/common.js
@@ -72,16 +72,16 @@ function checkExternals({ service, operations, tx, end }) {
function assertChatCompletionMessages({ tx, chatMsgs, expectedId, modelId, prompt, resContent }) {
const baseMsg = {
- 'appName': 'New Relic for Node.js tests',
- 'request_id': 'eda0760a-c3f0-4fc1-9a1e-75559d642866',
- 'trace_id': tx.traceId,
- 'span_id': tx.trace.root.children[0].id,
+ appName: 'New Relic for Node.js tests',
+ request_id: 'eda0760a-c3f0-4fc1-9a1e-75559d642866',
+ trace_id: tx.traceId,
+ span_id: tx.trace.root.children[0].id,
'response.model': modelId,
- 'vendor': 'bedrock',
- 'ingest_source': 'Node',
- 'role': 'user',
- 'is_response': false,
- 'completion_id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/
+ vendor: 'bedrock',
+ ingest_source: 'Node',
+ role: 'user',
+ is_response: false,
+ completion_id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/
}
chatMsgs.forEach((msg) => {
@@ -112,22 +112,22 @@ function assertChatCompletionMessages({ tx, chatMsgs, expectedId, modelId, promp
function assertChatCompletionSummary({ tx, modelId, chatSummary, error = false, numMsgs = 2 }) {
const expectedChatSummary = {
- 'id': /[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}/,
- 'appName': 'New Relic for Node.js tests',
- 'request_id': 'eda0760a-c3f0-4fc1-9a1e-75559d642866',
+ id: /\w{8}-\w{4}-\w{4}-\w{4}-\w{12}/,
+ appName: 'New Relic for Node.js tests',
+ request_id: 'eda0760a-c3f0-4fc1-9a1e-75559d642866',
'llm.conversation_id': 'convo-id',
- 'trace_id': tx.traceId,
- 'span_id': tx.trace.root.children[0].id,
+ trace_id: tx.traceId,
+ span_id: tx.trace.root.children[0].id,
'response.model': modelId,
- 'vendor': 'bedrock',
- 'ingest_source': 'Node',
+ vendor: 'bedrock',
+ ingest_source: 'Node',
'request.model': modelId,
- 'duration': tx.trace.root.children[0].getDurationInMillis(),
+ duration: tx.trace.root.children[0].getDurationInMillis(),
'response.number_of_messages': error ? 1 : numMsgs,
'response.choices.finish_reason': error ? undefined : 'endoftext',
'request.temperature': 0.5,
'request.max_tokens': 100,
- 'error': error
+ error
}
assert.equal(chatSummary[0].type, 'LlmChatCompletionSummary')
diff --git a/test/versioned/aws-sdk-v3/lambda.test.js b/test/versioned/aws-sdk-v3/lambda.test.js
index f163bc1e92..c1f012443f 100644
--- a/test/versioned/aws-sdk-v3/lambda.test.js
+++ b/test/versioned/aws-sdk-v3/lambda.test.js
@@ -37,7 +37,7 @@ function checkEntityLinkingSegments({ operations, tx, end }) {
'aws.region': 'us-east-1',
'aws.service': String,
'cloud.resource_id': `arn:aws:lambda:${attrs['aws.region']}:${accountId}:function:${testFunctionName}`,
- 'cloud.platform': `aws_lambda`
+ 'cloud.platform': 'aws_lambda'
})
})
end()
diff --git a/test/versioned/aws-sdk-v3/lib-dynamodb.test.js b/test/versioned/aws-sdk-v3/lib-dynamodb.test.js
index 032dbc5736..c85bf1da3a 100644
--- a/test/versioned/aws-sdk-v3/lib-dynamodb.test.js
+++ b/test/versioned/aws-sdk-v3/lib-dynamodb.test.js
@@ -155,10 +155,10 @@ function finish(end, tests, tx) {
const attrs = segment.attributes.get(common.SEGMENT_DESTINATION)
attrs.port_path_or_id = parseInt(attrs.port_path_or_id, 10)
match(attrs, {
- 'host': String,
- 'port_path_or_id': Number,
- 'product': 'DynamoDB',
- 'collection': String,
+ host: String,
+ port_path_or_id: Number,
+ product: 'DynamoDB',
+ collection: String,
'aws.operation': operation,
'aws.requestId': String,
'aws.region': 'us-east-1',
diff --git a/test/versioned/aws-sdk-v3/sns.test.js b/test/versioned/aws-sdk-v3/sns.test.js
index 9f6b679143..3e58a839f8 100644
--- a/test/versioned/aws-sdk-v3/sns.test.js
+++ b/test/versioned/aws-sdk-v3/sns.test.js
@@ -220,7 +220,7 @@ function finish(end, tx, destName, setLibrarySpy) {
'aws.requestId': String,
'aws.service': /sns|SNS/,
'aws.region': 'us-east-1'
- }),
- assert.equal(setLibrarySpy.callCount, 1, 'should only call setLibrary once and not per call')
+ })
+ assert.equal(setLibrarySpy.callCount, 1, 'should only call setLibrary once and not per call')
end()
}
diff --git a/test/versioned/bluebird/common-tests.js b/test/versioned/bluebird/common-tests.js
index b9dcc5178b..2b4e0556b7 100644
--- a/test/versioned/bluebird/common-tests.js
+++ b/test/versioned/bluebird/common-tests.js
@@ -385,8 +385,7 @@ function testFinallyBehavior(methodName) {
name + 'should pass values beyond ' + methodName + ' handler'
)
throw new Error('Promise#' + methodName + ' test error')
- })
- [methodName](function () {
+ })[methodName](function () {
plan.equal(arguments.length, 0, name + 'should not receive any parameters')
plan.ok(1, name + 'should go into ' + methodName + ' handler from rejected promise')
})
@@ -487,8 +486,7 @@ function testAsCallbackBehavior(methodName) {
})
.then(function () {
plan.ok(0, name + 'should have skipped then after rejection')
- })
- [methodName](function (err, result) {
+ })[methodName](function (err, result) {
const inCallbackTransaction = agent.getTransaction()
plan.equal(
id(startTransaction),
@@ -538,8 +536,7 @@ function testCatchBehavior(methodName) {
})
.then(function () {
throw new Error('Promise#' + methodName + ' test error')
- })
- [methodName](function (err) {
+ })[methodName](function (err) {
plan.ok(err, name + 'should pass error into rejection handler')
plan.equal(
err.message,
diff --git a/test/versioned/bluebird/methods.test.js b/test/versioned/bluebird/methods.test.js
index e090c09325..6ad2caea20 100644
--- a/test/versioned/bluebird/methods.test.js
+++ b/test/versioned/bluebird/methods.test.js
@@ -4,6 +4,10 @@
*/
'use strict'
+
+// Some tests in this file need to assert that we handle non-error rejections:
+/* eslint-disable prefer-promise-reject-errors */
+
const assert = require('node:assert')
const test = require('node:test')
const semver = require('semver')
@@ -136,7 +140,7 @@ test('new Promise()', async function (t) {
await testPromiseContext({
t,
factory: function (Promise, name) {
- return new Promise((resolve) => resolve(name))
+ return Promise.resolve(name)
}
})
})
@@ -288,7 +292,7 @@ test('Promise.coroutine', async function (t) {
await testPromiseContext({
t,
factory: function (Promise, name) {
- return Promise.coroutine(function* (_name) {
+ return Promise.coroutine(function * (_name) {
for (let i = 0; i < 10; ++i) {
yield Promise.delay(5)
}
@@ -315,7 +319,7 @@ test('Promise.coroutine', async function (t) {
})
}, 'should be able to add yield handler')
- return Promise.coroutine(function* (_name) {
+ return Promise.coroutine(function * (_name) {
for (let i = 0; i < 10; ++i) {
yield Promise.delay(5)
++count
@@ -902,7 +906,7 @@ test('Promise#bind', async function (t) {
await testPromiseContext({
t,
factory: function (Promise, name) {
- return Promise.resolve(name).bind({ name: name })
+ return Promise.resolve(name).bind({ name })
}
})
diff --git a/test/versioned/bunyan/helpers.js b/test/versioned/bunyan/helpers.js
index 51332a1147..f6b13b90fa 100644
--- a/test/versioned/bunyan/helpers.js
+++ b/test/versioned/bunyan/helpers.js
@@ -46,8 +46,10 @@ helpers.logStuff = function logStuff({ logger, helper, agent }) {
* local log decoration is enabled. Local log decoration asserts `NR-LINKING` string exists on msg
*
* @param {Object} opts
- * @param {boolean} [opts.includeLocalDecorating=false] is local log decoration enabled
- * @param {string} [opts.level=info] level to assert is on message
+ * @param {boolean} [opts.includeLocalDecorating] is local log decoration enabled
+ * @param {string} [opts.level] level to assert is on message
+ * @param opts.logLine
+ * @param opts.hostname
*/
helpers.originalMsgAssertion = function originalMsgAssertion({
includeLocalDecorating = false,
diff --git a/test/versioned/cassandra-driver/query.test.js b/test/versioned/cassandra-driver/query.test.js
index 688809d8b9..cf0f61da58 100644
--- a/test/versioned/cassandra-driver/query.test.js
+++ b/test/versioned/cassandra-driver/query.test.js
@@ -99,7 +99,7 @@ test('executeBatch - callback style', (t, end) => {
assert.ok(transaction, 'transaction should be visible')
assert.equal(tx, transaction, 'we got the same transaction')
- client.batch(insArr, { hints: hints }, (error, ok) => {
+ client.batch(insArr, { hints }, (error, ok) => {
assert.ifError(error, 'should not get an error')
assert.ok(agent.getTransaction(), 'transaction should still be visible')
@@ -135,7 +135,7 @@ test('executeBatch - promise style', (t, end) => {
assert.equal(tx, transaction, 'we got the same transaction')
client
- .batch(insArr, { hints: hints })
+ .batch(insArr, { hints })
.then(() => {
client
.execute(selQuery)
@@ -172,7 +172,7 @@ test('executeBatch - slow query', (t, end) => {
assert.ok(transaction, 'transaction should be visible')
assert.equal(tx, transaction, 'We got the same transaction')
- client.batch(insArr, { hints: hints }, (error, ok) => {
+ client.batch(insArr, { hints }, (error, ok) => {
assert.ifError(error, 'should not get an error')
const slowQuery = `SELECT * FROM ${KS}.${FAM}`
@@ -208,7 +208,7 @@ function checkMetric(agent, scoped) {
}
for (const expectedMetric in expected) {
- if (expected.hasOwnProperty(expectedMetric)) {
+ if (Object.prototype.hasOwnProperty.call(expected, expectedMetric)) {
const count = expected[expectedMetric]
const metric = agentMetrics[scoped ? 'scoped' : 'unscoped'][expectedMetric]
diff --git a/test/versioned/connect/route.test.js b/test/versioned/connect/route.test.js
index 46c5196b42..d6525ce692 100644
--- a/test/versioned/connect/route.test.js
+++ b/test/versioned/connect/route.test.js
@@ -107,6 +107,9 @@ test('should default to `/` when no route is specified', async (t) => {
* @param {string} params.expectedData expected response data
* @param {Object} app connect app
*
+ * @param params.plan
+ * @param params.app
+ * @param params.pkgVersion
* @returns {http.Server}
*/
function createServerAndMakeRequest({ url, expectedData, plan, app, pkgVersion }) {
diff --git a/test/versioned/elastic/elasticsearch.test.js b/test/versioned/elastic/elasticsearch.test.js
index 02253b0aa1..74bbed80ee 100644
--- a/test/versioned/elastic/elasticsearch.test.js
+++ b/test/versioned/elastic/elasticsearch.test.js
@@ -6,6 +6,7 @@
'use strict'
const test = require('node:test')
const assert = require('node:assert')
+const path = require('node:path')
const helper = require('../../lib/agent_helper')
const params = require('../../lib/params')
const urltils = require('../../../lib/util/urltils')
@@ -52,7 +53,7 @@ test('Elasticsearch instrumentation', async (t) => {
t.beforeEach(async (ctx) => {
// Determine version. ElasticSearch v7 did not export package, so we have to read the file
// instead of requiring it, as we can with 8+.
- const pkg = await readFile(`${__dirname}/node_modules/@elastic/elasticsearch/package.json`)
+ const pkg = await readFile(path.join(__dirname, '/node_modules/@elastic/elasticsearch/package.json'))
const { version: pkgVersion } = JSON.parse(pkg.toString())
const agent = helper.instrumentMockedAgent()
diff --git a/test/versioned/express-esm/segments.test.mjs b/test/versioned/express-esm/segments.test.mjs
index b85e5feadd..b200e66de3 100644
--- a/test/versioned/express-esm/segments.test.mjs
+++ b/test/versioned/express-esm/segments.test.mjs
@@ -161,7 +161,7 @@ test('each handler in route has its own segment', async (t) => {
test('segments for routers', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.all('/test', function (req, res) {
res.end()
})
@@ -188,13 +188,13 @@ test('segments for routers', async (t) => {
test('two root routers', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
router1.all('/', function (req, res) {
res.end()
})
app.use('/', router1)
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router2 = express.Router()
router2.all('/test', function (req, res) {
res.end()
})
@@ -217,7 +217,7 @@ test('two root routers', async (t) => {
test('router mounted as a route handler', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
router1.all('/test', function testHandler(req, res) {
res.send('test')
})
@@ -259,7 +259,7 @@ test('router mounted as a route handler', async (t) => {
test('segments for routers', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.all('/test', function (req, res) {
res.end()
})
@@ -390,7 +390,7 @@ test('segments for wildcard', async (t) => {
test('router with subapp', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
const subapp = express()
subapp.all('/test', function (req, res) {
res.end()
@@ -472,7 +472,7 @@ test('error middleware', async (t) => {
test('error handler in router', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/test', function () {
throw new Error('some error')
@@ -513,8 +513,8 @@ test('error handler in router', async (t) => {
test('error handler in second router', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
router2.get('/test', function () {
throw new Error('some error')
@@ -559,7 +559,7 @@ test('error handler in second router', async (t) => {
test('error handler outside of router', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/test', function () {
throw new Error('some error')
@@ -596,8 +596,8 @@ test('error handler outside of router', async (t) => {
test('error handler outside of two routers', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
router1.use('/router2', router2)
diff --git a/test/versioned/express-esm/transaction-naming.test.mjs b/test/versioned/express-esm/transaction-naming.test.mjs
index 8d94db031b..78f196e1dc 100644
--- a/test/versioned/express-esm/transaction-naming.test.mjs
+++ b/test/versioned/express-esm/transaction-naming.test.mjs
@@ -201,7 +201,7 @@ test('with error', async (t) => {
next(Error('some error'))
})
- app.use(function (err, req, res, next) {
+ app.use(function (_, req, res, next) {
res.status(500).end()
next()
})
@@ -216,7 +216,7 @@ test('with error and path-specific error handler', async (t) => {
throw new Error('some error')
})
- app.use('/path1', function (err, req, res, next) {
+ app.use('/path1', function (_, req, res, next) {
res.status(500).end()
next()
})
@@ -235,7 +235,7 @@ test('when router error is handled outside of the router', async (t) => {
app.use('/router1', router)
- app.use(function (err, req, res, next) {
+ app.use(function (_, req, res, next) {
res.status(500).end()
next()
})
@@ -277,7 +277,7 @@ test('when using a regular expression in path', async (t) => {
test('when using router with a route variable', async (t) => {
const { agent, app, express, server } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/:var2/path1', function (req, res) {
res.end()
@@ -304,8 +304,8 @@ test('when mounting a subapp using a variable', async (t) => {
test('using two routers', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
app.use('/:router1', router1)
router1.use('/:router2', router2)
@@ -324,8 +324,8 @@ test('using two routers', async (t) => {
test('transactions running in parallel should be recorded correctly', async (t) => {
const { agent, app, express, server } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
app.use('/:router1', router1)
router1.use('/:router2', router2)
@@ -353,7 +353,7 @@ test('transactions running in parallel should be recorded correctly', async (t)
})
test('names transaction when request is aborted', async (t) => {
- const plan = tspl(t, { plan: 4 })
+ const plan = tspl(t, { plan: 5 })
const { agent, app, server } = t.nr
let request = null
@@ -370,8 +370,8 @@ test('names transaction when request is aborted', async (t) => {
})
const promise = new Promise((resolve) => {
- // eslint-disable-next-line no-unused-vars
app.use(function (error, req, res, next) {
+ plan.equal(error.message, 'some error')
plan.ok(agent.getTransaction() == null, 'no active transaction when responding')
res.end()
resolve()
diff --git a/test/versioned/express/async-handlers.test.js b/test/versioned/express/async-handlers.test.js
index f350d38a19..da6cb2e24b 100644
--- a/test/versioned/express/async-handlers.test.js
+++ b/test/versioned/express/async-handlers.test.js
@@ -57,8 +57,7 @@ test('async handlers', { skip: !isExpress5() }, async (t) => {
app.use('/test', function handler() {
throw new Error('should not call handler on error')
})
- // eslint-disable-next-line no-unused-vars
- app.use(function (error, req, res, next) {
+ app.use(function (_, req, res, next) {
res.status(400).end()
})
diff --git a/test/versioned/express/bare-router.test.js b/test/versioned/express/bare-router.test.js
index 6620487144..1c9712f070 100644
--- a/test/versioned/express/bare-router.test.js
+++ b/test/versioned/express/bare-router.test.js
@@ -18,7 +18,7 @@ test.afterEach(teardown)
test('Express router introspection', async function (t) {
const { agent, app, port } = t.nr
- const plan = tsplan(t, { plan: 11 })
+ const plan = tsplan(t, { plan: 12 })
// need to capture parameters
agent.config.attributes.enabled = true
@@ -51,6 +51,7 @@ test('Express router introspection', async function (t) {
const url = 'http://localhost:' + port + '/test'
helper.makeGetRequest(url, { json: true }, function (error, res, body) {
+ plan.ifError(error)
plan.equal(res.statusCode, 200, 'nothing exploded')
plan.deepEqual(body, { status: 'ok' }, 'got expected response')
})
diff --git a/test/versioned/express/client-disconnect.test.js b/test/versioned/express/client-disconnect.test.js
index 8505abe891..6536ba5779 100644
--- a/test/versioned/express/client-disconnect.test.js
+++ b/test/versioned/express/client-disconnect.test.js
@@ -64,7 +64,7 @@ test('Client Premature Disconnection', { timeout: 3000 }, (t, end) => {
const request = http.request(
{
hostname: 'localhost',
- port: port,
+ port,
method: 'POST',
path: '/test',
headers: {
diff --git a/test/versioned/express/erk.js b/test/versioned/express/erk.js
index b596467c6c..4630bfe39d 100644
--- a/test/versioned/express/erk.js
+++ b/test/versioned/express/erk.js
@@ -25,7 +25,6 @@ helper.ranomPort(function (port) {
server.listen(port, function () {
process.on('message', function (code) {
helper.makeGetRequest('http://localhost:' + port + '/test/31337', function () {
- // eslint-disable-next-line no-process-exit
process.exit(code)
})
})
diff --git a/test/versioned/express/errors.test.js b/test/versioned/express/errors.test.js
index 7fbf08abc0..b19801b8da 100644
--- a/test/versioned/express/errors.test.js
+++ b/test/versioned/express/errors.test.js
@@ -4,6 +4,10 @@
*/
'use strict'
+
+// Make express quiet.
+process.env.NODE_ENV = 'test'
+
const assert = require('node:assert')
const http = require('http')
const test = require('node:test')
@@ -66,8 +70,7 @@ test('Error handling tests', async (t) => {
throw new Error('some error')
})
- // eslint-disable-next-line no-unused-vars
- app.use(function (error, req, res, next) {
+ app.use(function (_, req, res, next) {
res.end()
})
@@ -87,8 +90,7 @@ test('Error handling tests', async (t) => {
throw new Error('some error')
})
- // eslint-disable-next-line no-unused-vars
- app.use(function (error, req, res, next) {
+ app.use(function (_, req, res, next) {
res.status(400).end()
})
@@ -148,7 +150,7 @@ test('Error handling tests', async (t) => {
throw new Error('some error')
})
- app.use(function (err, req, res, next) {
+ app.use(function (_, req, res, next) {
next()
})
@@ -166,15 +168,14 @@ test('Error handling tests', async (t) => {
await t.test('should not report errors handled by errorware outside router', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
router1.get('/test', function () {
throw new Error('some error')
})
app.use(router1)
- // eslint-disable-next-line no-unused-vars
- app.use(function (error, req, res, next) {
+ app.use(function (_, req, res, next) {
res.end()
})
@@ -186,7 +187,7 @@ test('Error handling tests', async (t) => {
})
await t.test('does not error when request is aborted', async function (t) {
- const plan = tsplan(t, { plan: 4 })
+ const plan = tsplan(t, { plan: 5 })
const { app, agent, port } = t.nr
let request = null
@@ -201,8 +202,8 @@ test('Error handling tests', async (t) => {
}, 100)
})
- // eslint-disable-next-line no-unused-vars
app.use(function (error, req, res, next) {
+ plan.equal(error.message, 'some error')
plan.equal(agent.getTransaction(), null, 'no active transaction when responding')
res.end()
})
diff --git a/test/versioned/express/express-enrouten.test.js b/test/versioned/express/express-enrouten.test.js
index 40beac47f1..fbd35ce984 100644
--- a/test/versioned/express/express-enrouten.test.js
+++ b/test/versioned/express/express-enrouten.test.js
@@ -22,7 +22,7 @@ test.afterEach(teardown)
test('Express + express-enrouten compatibility test', { skip: isExpress5() }, async function (t) {
const { app, port } = t.nr
- const plan = tsplan(t, { plan: 2 })
+ const plan = tsplan(t, { plan: 4 })
const enrouten = require('express-enrouten')
app.use(enrouten({ directory: './fixtures' }))
@@ -30,10 +30,12 @@ test('Express + express-enrouten compatibility test', { skip: isExpress5() }, as
// New Relic + express-enrouten used to have a bug, where any routes after the
// first one would be lost.
helper.makeGetRequest('http://localhost:' + port + '/', function (error, res) {
+ plan.ifError(error)
plan.equal(res.statusCode, 200, 'First Route loaded')
})
helper.makeGetRequest('http://localhost:' + port + '/foo', function (error, res) {
+ plan.ifError(error)
plan.equal(res.statusCode, 200, 'Second Route loaded')
})
await plan.completed
diff --git a/test/versioned/express/fixtures/index.js b/test/versioned/express/fixtures/index.js
index a8cb9419f3..e1c3d3b077 100644
--- a/test/versioned/express/fixtures/index.js
+++ b/test/versioned/express/fixtures/index.js
@@ -7,6 +7,7 @@
/**
* Created by lmarkus on 11/24/14.
+ * @param router
*/
const routes = function (router) {
router.get('/', function (req, res) {
diff --git a/test/versioned/express/ignoring.test.js b/test/versioned/express/ignoring.test.js
index feeecd9128..dfa0186c9f 100644
--- a/test/versioned/express/ignoring.test.js
+++ b/test/versioned/express/ignoring.test.js
@@ -19,7 +19,7 @@ test.afterEach(teardown)
test('ignoring an Express route', async function (t) {
const { agent, app, port, isExpress5 } = t.nr
- const plan = tsplan(t, { plan: 7 })
+ const plan = tsplan(t, { plan: 8 })
const api = new API(agent)
@@ -36,7 +36,14 @@ test('ignoring an Express route', async function (t) {
const metrics = agent.metrics._metrics.unscoped
// loading k2 adds instrumentation metrics for things it loads
- const expectedMetrics = helper.isSecurityAgentEnabled(agent) ? (isExpress5 ? 13 : 11) : 3
+ let expectedMetrics = 3
+ if (helper.isSecurityAgentEnabled(agent) === true) {
+ if (isExpress5 === true) {
+ expectedMetrics = 13
+ } else {
+ expectedMetrics = 11
+ }
+ }
plan.equal(
Object.keys(metrics).length,
expectedMetrics,
@@ -54,6 +61,7 @@ test('ignoring an Express route', async function (t) {
const url = 'http://localhost:' + port + '/polling/31337'
helper.makeGetRequest(url, function (error, res, body) {
+ plan.ifError(error)
plan.equal(res.statusCode, 400, 'got expected error')
plan.deepEqual(body, { status: 'pollpollpoll' }, 'got expected response')
})
diff --git a/test/versioned/express/issue171.test.js b/test/versioned/express/issue171.test.js
index 6169120abc..782228a3e3 100644
--- a/test/versioned/express/issue171.test.js
+++ b/test/versioned/express/issue171.test.js
@@ -20,7 +20,6 @@ test("adding 'handle' middleware", async function (t) {
const { app, port } = t.nr
const plan = tsplan(t, { plan: 2 })
- // eslint-disable-next-line no-unused-vars
function handle(err, req, res, next) {
plan.ok(err, 'error should exist')
@@ -35,7 +34,7 @@ test("adding 'handle' middleware", async function (t) {
app.use(handle)
http
- .request({ port: port }, function (res) {
+ .request({ port }, function (res) {
// drain response to let process exit
res.pipe(process.stderr)
diff --git a/test/versioned/express/render.test.js b/test/versioned/express/render.test.js
index 49648e6db3..2d4ac4cac3 100644
--- a/test/versioned/express/render.test.js
+++ b/test/versioned/express/render.test.js
@@ -9,6 +9,7 @@
process.env.NODE_ENV = 'test'
const assert = require('node:assert')
const test = require('node:test')
+const path = require('node:path')
const helper = require('../../lib/agent_helper')
const API = require('../../../api')
const symbols = require('../../../lib/symbols')
@@ -32,7 +33,7 @@ const BODY =
// https://github.com/newrelic/node-newrelic/pull/154
test('using only the express router', function (t, end) {
const agent = helper.instrumentMockedAgent()
- const router = require('express').Router() // eslint-disable-line new-cap
+ const router = require('express').Router()
t.after(() => {
helper.unloadAgent(agent)
})
@@ -47,7 +48,7 @@ test('using only the express router', function (t, end) {
test('the express router should go through a whole request lifecycle', async function (t) {
const agent = helper.instrumentMockedAgent()
- const router = require('express').Router() // eslint-disable-line new-cap
+ const router = require('express').Router()
const finalhandler = require('finalhandler')
const plan = tsplan(t, { plan: 2 })
@@ -170,7 +171,7 @@ test('agent instrumentation of Express', async function (t) {
await t.test('using EJS templates', { timeout: 1000 }, async function (t) {
const plan = tsplan(t, { plan: 4 })
const { app, agent, port } = t.nr
- app.set('views', __dirname + '/views')
+ app.set('views', path.join(__dirname, 'views'))
app.set('view engine', 'ejs')
app.get(TEST_PATH, function (req, res) {
@@ -201,7 +202,7 @@ test('agent instrumentation of Express', async function (t) {
agent.config.browser_monitoring.browser_key = '12345'
agent.config.browser_monitoring.js_agent_loader = 'function() {}'
- app.set('views', __dirname + '/views')
+ app.set('views', path.join(__dirname, 'views'))
app.set('view engine', 'ejs')
app.get(TEST_PATH, function (req, res) {
@@ -396,6 +397,7 @@ test('agent instrumentation of Express', async function (t) {
const { agent, app, port } = t.nr
app.get(TEST_PATH, function () {
+ // eslint-disable-next-line no-throw-literal
throw 'some error'
})
@@ -417,8 +419,7 @@ test('agent instrumentation of Express', async function (t) {
throw new Error('some error')
})
- // eslint-disable-next-line no-unused-vars
- app.use(function (err, rer, res, next) {
+ app.use(function (_, rer, res, next) {
res.status(400).end()
})
@@ -446,7 +447,6 @@ test('agent instrumentation of Express', async function (t) {
throw error
})
- // eslint-disable-next-line no-unused-vars
app.use(function (err, rer, res, next) {
delete err.message
delete err.stack
@@ -493,7 +493,7 @@ test('agent instrumentation of Express', async function (t) {
await t.test('layer wrapping', async function (t) {
const { app, port } = t.nr
- const plan = tsplan(t, { plan: 1 })
+ const plan = tsplan(t, { plan: 2 })
// Add our route.
app.get(TEST_PATH, function (req, res) {
res.send('bar')
@@ -506,6 +506,7 @@ test('agent instrumentation of Express', async function (t) {
// Make our request.
helper.makeGetRequest(`${TEST_URL}:${port}${TEST_PATH}`, function (err, response, body) {
+ plan.ifError(err)
plan.equal(body, 'bar', 'should not fail with a proxy layer')
})
await plan.completed
@@ -518,7 +519,7 @@ test('agent instrumentation of Express', async function (t) {
*
* @param {express.Layer} layer - The layer to proxy.
*
- * @return {object} A POD object with all the fields of the layer copied over.
+ * @returns {object} A POD object with all the fields of the layer copied over.
*/
function makeProxyLayer(layer) {
const fakeLayer = {
diff --git a/test/versioned/express/router-params.test.js b/test/versioned/express/router-params.test.js
index 775fa507a2..a273a9c84b 100644
--- a/test/versioned/express/router-params.test.js
+++ b/test/versioned/express/router-params.test.js
@@ -25,7 +25,7 @@ test('Express router introspection', async function (t) {
const { agent, app, express, port } = t.nr
const plan = tsplan(t, { plan: 14 })
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/b/:param2', function (req, res) {
plan.ok(agent.getTransaction(), 'transaction is available')
diff --git a/test/versioned/express/segments.test.js b/test/versioned/express/segments.test.js
index 8ebef17990..7eb98b0bd5 100644
--- a/test/versioned/express/segments.test.js
+++ b/test/versioned/express/segments.test.js
@@ -161,7 +161,7 @@ test('each handler in route has its own segment', function (t, end) {
test('segments for routers', function (t, end) {
const { app, express } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.all('/test', function (req, res) {
res.end()
})
@@ -191,13 +191,13 @@ test('segments for routers', function (t, end) {
test('two root routers', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
router1.all('/', function (req, res) {
res.end()
})
app.use('/', router1)
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router2 = express.Router()
router2.all('/test', function (req, res) {
res.end()
})
@@ -223,7 +223,7 @@ test('two root routers', function (t, end) {
test('router mounted as a route handler', function (t, end) {
const { app, express, isExpress5 } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
router1.all('/test', function testHandler(req, res) {
res.send('test')
})
@@ -267,7 +267,7 @@ test('router mounted as a route handler', function (t, end) {
test('segments for routers', function (t, end) {
const { app, express } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.all('/test', function (req, res) {
res.end()
})
@@ -408,7 +408,7 @@ test('segments for wildcard', function (t, end) {
test('router with subapp', function (t, end) {
const { app, express, isExpress5 } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
const subapp = express()
subapp.all('/test', function (req, res) {
res.end()
@@ -498,7 +498,7 @@ test('error middleware', function (t, end) {
test('error handler in router', function (t, end) {
const { app, express } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/test', function () {
throw new Error('some error')
@@ -515,7 +515,7 @@ test('error handler in router', function (t, end) {
runTest(
t,
{
- endpoint: endpoint,
+ endpoint,
errors: 0
},
function (segments, transaction) {
@@ -549,8 +549,8 @@ test('error handler in router', function (t, end) {
test('error handler in second router', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
router2.get('/test', function () {
throw new Error('some error')
@@ -568,7 +568,7 @@ test('error handler in second router', function (t, end) {
runTest(
t,
{
- endpoint: endpoint,
+ endpoint,
errors: 0
},
function (segments, transaction) {
@@ -605,7 +605,7 @@ test('error handler in second router', function (t, end) {
test('error handler outside of router', function (t, end) {
const { app, express } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/test', function () {
throw new Error('some error')
@@ -621,7 +621,7 @@ test('error handler outside of router', function (t, end) {
runTest(
t,
{
- endpoint: endpoint,
+ endpoint,
errors: 0
},
function (segments, transaction) {
@@ -652,8 +652,8 @@ test('error handler outside of router', function (t, end) {
test('error handler outside of two routers', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
router1.use('/router2', router2)
@@ -671,7 +671,7 @@ test('error handler outside of two routers', function (t, end) {
runTest(
t,
{
- endpoint: endpoint,
+ endpoint,
errors: 0
},
function (segments, transaction) {
diff --git a/test/versioned/express/transaction-naming.test.js b/test/versioned/express/transaction-naming.test.js
index eca3baf371..dade6c9fc2 100644
--- a/test/versioned/express/transaction-naming.test.js
+++ b/test/versioned/express/transaction-naming.test.js
@@ -4,6 +4,10 @@
*/
'use strict'
+
+// Make express quiet.
+process.env.NODE_ENV = 'test'
+
const assert = require('node:assert')
const http = require('http')
const test = require('node:test')
@@ -184,7 +188,7 @@ test('with error', function (t, end) {
next(new Error('some error'))
})
- app.use(function (err, req, res) {
+ app.use(function (_, req, res) {
return res.status(500).end()
})
@@ -216,8 +220,7 @@ test('when router error is handled outside of the router', function (t, end) {
app.use('/router1', router)
- // eslint-disable-next-line no-unused-vars
- app.use(function (err, req, res, next) {
+ app.use(function (_, req, res, next) {
return res.status(500).end()
})
@@ -259,7 +262,7 @@ test('when using a regular expression in path', function (t, end) {
test('when using router with a route variable', function (t, end) {
const { app, express } = t.nr
- const router = express.Router() // eslint-disable-line new-cap
+ const router = express.Router()
router.get('/:var2/path1', function (req, res) {
res.end()
@@ -286,8 +289,8 @@ test('when mounting a subapp using a variable', function (t, end) {
test('using two routers', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
app.use('/:router1', router1)
router1.use('/:router2', router2)
@@ -301,8 +304,8 @@ test('using two routers', function (t, end) {
test('transactions running in parallel should be recorded correctly', function (t, end) {
const { app, express } = t.nr
- const router1 = express.Router() // eslint-disable-line new-cap
- const router2 = express.Router() // eslint-disable-line new-cap
+ const router1 = express.Router()
+ const router2 = express.Router()
app.use('/:router1', router1)
router1.use('/:router2', router2)
@@ -328,7 +331,7 @@ test('transactions running in parallel should be recorded correctly', function (
})
test('names transaction when request is aborted', async function (t) {
- const plan = tsplan(t, { plan: 5 })
+ const plan = tsplan(t, { plan: 6 })
const { agent, app, port } = t.nr
@@ -345,8 +348,8 @@ test('names transaction when request is aborted', async function (t) {
}, 100)
})
- // eslint-disable-next-line no-unused-vars
app.use(function (error, req, res, next) {
+ plan.equal(error.message, 'some error')
plan.ok(agent.getTransaction() == null, 'no active transaction when responding')
res.end()
})
@@ -386,7 +389,6 @@ test('Express transaction names are unaffected by errorware', async function (t)
throw new Error('endpoint error')
})
- // eslint-disable-next-line no-unused-vars
app.use('/test', function (err, req, res, next) {
res.send(err.message)
})
@@ -442,7 +444,7 @@ test('when next is called after transaction state loss', async function (t) {
})
// Send first request to `/foo` which is slow and uses the work queue.
- http.get({ port: port, path: '/foo' }, function (res) {
+ http.get({ port, path: '/foo' }, function (res) {
res.resume()
res.on('end', function () {
plan.equal(transactionsFinished, 2, 'should have two transactions done')
@@ -452,7 +454,7 @@ test('when next is called after transaction state loss', async function (t) {
// Send the second request after a short wait `/bar` which is fast and
// does not use the work queue.
setTimeout(function () {
- http.get({ port: port, path: '/bar' }, function (res) {
+ http.get({ port, path: '/bar' }, function (res) {
res.resume()
})
}, 100)
diff --git a/test/versioned/fastify/add-hook.test.js b/test/versioned/fastify/add-hook.test.js
index cecf24da28..97020e9e5a 100644
--- a/test/versioned/fastify/add-hook.test.js
+++ b/test/versioned/fastify/add-hook.test.js
@@ -81,7 +81,7 @@ test('non-error hooks', async (t) => {
assert.equal(
'WebFrameworkUri/Fastify/GET//add-hook',
transaction.getName(),
- `transaction name matched`
+ 'transaction name matched'
)
// all the hooks are siblings of the route handler
// except the AFTER_HANDLER_HOOKS which are children of the route handler
@@ -143,7 +143,7 @@ test('error hook', async function errorHookTest(t) {
assert.equal(
'WebFrameworkUri/Fastify/GET//error',
transaction.getName(),
- `transaction name matched`
+ 'transaction name matched'
)
// all the hooks are siblings of the route handler
let expectedSegments
diff --git a/test/versioned/fastify/common.js b/test/versioned/fastify/common.js
index 79330bf3c2..17a4eb1407 100644
--- a/test/versioned/fastify/common.js
+++ b/test/versioned/fastify/common.js
@@ -78,7 +78,6 @@ common.setupRoutes = (fastify) => {
* values of params
*/
fastify.get('/params/:id/:parent/edit', async (request) => {
- /* eslint-disable-next-line node/no-unsupported-features/es-syntax */
return { ...request.params }
})
}
@@ -87,6 +86,9 @@ common.setupRoutes = (fastify) => {
* Defines both a global middleware and middleware mounted at a specific
* path. This tests the `middie` and/or `fastify-express` plugin middleware
* instrumentation
+ * @param root0
+ * @param root0.fastify
+ * @param root0.calls
*/
common.registerMiddlewares = ({ fastify, calls }) => {
function testMiddleware(req, res, next) {
@@ -108,8 +110,11 @@ common.registerMiddlewares = ({ fastify, calls }) => {
/**
* Helper to make a request and parse the json body
*
+ * @param address.address
* @param {Object} address fastify address contains address/port/family
+ * @param address.port
* @param {string} uri to make request to
+ * @param address.family
* @returns {Object} parsed json body
*/
common.makeRequest = async ({ address, port, family }, uri) => {
diff --git a/test/versioned/fastify/errors.test.js b/test/versioned/fastify/errors.test.js
index 71d50c05f8..11c9f00574 100644
--- a/test/versioned/fastify/errors.test.js
+++ b/test/versioned/fastify/errors.test.js
@@ -32,7 +32,7 @@ test('Test Errors', async (t) => {
fastify.use((req, res, next) => {
const err = new Error('Not found')
- // eslint-disable-next-line new-cap
+
err.status = 404
next(err)
})
diff --git a/test/versioned/generic-pool/basic.test.js b/test/versioned/generic-pool/basic.test.js
index 0054d2c75f..72eb510889 100644
--- a/test/versioned/generic-pool/basic.test.js
+++ b/test/versioned/generic-pool/basic.test.js
@@ -53,13 +53,13 @@ test('instantiation', (t) => {
plan.doesNotThrow(function () {
const p = pool.createPool({
create: function () {
- return new Promise(function (res) {
- addTask(res, {})
+ return new Promise(function (resolve) {
+ addTask(resolve, {})
})
},
destroy: function () {
- return new Promise(function (res) {
- addTask(res)
+ return new Promise(function (resolve) {
+ addTask(resolve)
})
}
})
@@ -72,13 +72,13 @@ test('context maintenance', (t, end) => {
const p = pool.createPool(
{
create: function () {
- return new Promise(function (res) {
- addTask(res, {})
+ return new Promise(function (resolve) {
+ addTask(resolve, {})
})
},
destroy: function () {
- return new Promise(function (res) {
- addTask(res)
+ return new Promise(function (resolve) {
+ addTask(resolve)
})
}
},
diff --git a/test/versioned/grpc-esm/client-unary.test.mjs b/test/versioned/grpc-esm/client-unary.test.mjs
index 1e6922e8cf..989bcc8f82 100644
--- a/test/versioned/grpc-esm/client-unary.test.mjs
+++ b/test/versioned/grpc-esm/client-unary.test.mjs
@@ -82,7 +82,7 @@ test('should include distributed trace headers when enabled', (t, end) => {
const dtMeta = server.metadataMap.get(payload.name)
match(
dtMeta.get('traceparent')[0],
- /^[\w\d\-]{55}$/,
+ /^[\w-]{55}$/,
'should have traceparent in server metadata'
)
assert.equal(dtMeta.get('newrelic')[0], '', 'should have newrelic in server metadata')
@@ -174,6 +174,7 @@ for (const config of grpcConfigs) {
test('should bind callback to the proper transaction context', (t, end) => {
helper.runInTransaction(agent, 'web', async (tx) => {
client.sayHello({ name: 'Callback' }, (err, response) => {
+ assert.ifError(err)
assert.ok(response)
assert.equal(response.message, 'Hello Callback')
assert.ok(agent.getTransaction(), 'callback should have transaction context')
diff --git a/test/versioned/grpc-esm/server-unary.test.mjs b/test/versioned/grpc-esm/server-unary.test.mjs
index 5681aa5755..f5a8f85cef 100644
--- a/test/versioned/grpc-esm/server-unary.test.mjs
+++ b/test/versioned/grpc-esm/server-unary.test.mjs
@@ -200,7 +200,7 @@ for (const config of grpcConfigs) {
fnName: 'sayError',
payload: { oh: 'noes' }
})
- } catch (err) {
+ } catch {
// err tested in client tests
}
diff --git a/test/versioned/grpc/client-bidi-streaming.test.js b/test/versioned/grpc/client-bidi-streaming.test.js
index 5bee1a17db..f209a966cd 100644
--- a/test/versioned/grpc/client-bidi-streaming.test.js
+++ b/test/versioned/grpc/client-bidi-streaming.test.js
@@ -80,7 +80,7 @@ test('should include distributed trace headers when enabled', (t, end) => {
const dtMeta = server.metadataMap.get(payload[0].name)
match(
dtMeta.get('traceparent')[0],
- /^[\w\d\-]{55}$/,
+ /^[\w-]{55}$/,
'should have traceparent in server metadata'
)
assert.equal(dtMeta.get('newrelic')[0], '', 'should have newrelic in server metadata')
diff --git a/test/versioned/grpc/client-server-streaming.test.js b/test/versioned/grpc/client-server-streaming.test.js
index 32c97f1747..ed79070a2b 100644
--- a/test/versioned/grpc/client-server-streaming.test.js
+++ b/test/versioned/grpc/client-server-streaming.test.js
@@ -75,7 +75,7 @@ test('should include distributed trace headers when enabled', (t, end) => {
const dtMeta = server.metadataMap.get(name)
match(
dtMeta.get('traceparent')[0],
- /^[\w\d\-]{55}$/,
+ /^[\w-]{55}$/,
'should have traceparent in server metadata'
)
assert.equal(dtMeta.get('newrelic')[0], '', 'should have newrelic in server metadata')
diff --git a/test/versioned/grpc/client-streaming.test.js b/test/versioned/grpc/client-streaming.test.js
index e6a5622d01..a77be3596f 100644
--- a/test/versioned/grpc/client-streaming.test.js
+++ b/test/versioned/grpc/client-streaming.test.js
@@ -78,7 +78,7 @@ test('should include distributed trace headers when enabled', (t, end) => {
const dtMeta = server.metadataMap.get(name)
match(
dtMeta.get('traceparent')[0],
- /^[\w\d\-]{55}$/,
+ /^[\w-]{55}$/,
'should have traceparent in server metadata'
)
assert.equal(dtMeta.get('newrelic')[0], '', 'should have newrelic in server metadata')
@@ -214,6 +214,7 @@ test('should bind callback to the proper transaction context', (t, end) => {
const { agent, client } = t.nr
helper.runInTransaction(agent, 'web', async (tx) => {
const call = client.sayHelloClientStream((err, response) => {
+ assert.ifError(err)
assert.ok(response)
assert.equal(response.message, 'Hello Callback')
assert.ok(agent.getTransaction(), 'callback should have transaction context')
diff --git a/test/versioned/grpc/client-unary.test.js b/test/versioned/grpc/client-unary.test.js
index 756679b203..8dc3817ddf 100644
--- a/test/versioned/grpc/client-unary.test.js
+++ b/test/versioned/grpc/client-unary.test.js
@@ -72,7 +72,7 @@ test('should include distributed trace headers when enabled', (t, end) => {
const dtMeta = server.metadataMap.get(payload.name)
match(
dtMeta.get('traceparent')[0],
- /^[\w\d\-]{55}$/,
+ /^[\w-]{55}$/,
'should have traceparent in server metadata'
)
assert.equal(dtMeta.get('newrelic')[0], '', 'should have newrelic in server metadata')
@@ -163,6 +163,7 @@ test('should bind callback to the proper transaction context', (t, end) => {
const { agent, client } = t.nr
helper.runInTransaction(agent, 'web', async (tx) => {
client.sayHello({ name: 'Callback' }, (err, response) => {
+ assert.ifError(err)
assert.ok(response)
assert.equal(response.message, 'Hello Callback')
assert.ok(agent.getTransaction(), 'callback should have transaction context')
diff --git a/test/versioned/grpc/server-bidi-streaming.test.js b/test/versioned/grpc/server-bidi-streaming.test.js
index 45d4e5b94d..ddd0e52e09 100644
--- a/test/versioned/grpc/server-bidi-streaming.test.js
+++ b/test/versioned/grpc/server-bidi-streaming.test.js
@@ -166,7 +166,7 @@ for (const config of grpcConfigs) {
try {
const payload = [{ name: 'server-error' }]
await makeBidiStreamingRequest({ client, fnName: 'sayErrorBidiStream', payload })
- } catch (err) {
+ } catch {
// err tested in client tests
}
diff --git a/test/versioned/grpc/server-client-streaming.test.js b/test/versioned/grpc/server-client-streaming.test.js
index 3b9a506eb6..c9aed692c5 100644
--- a/test/versioned/grpc/server-client-streaming.test.js
+++ b/test/versioned/grpc/server-client-streaming.test.js
@@ -170,7 +170,7 @@ for (const config of grpcConfigs) {
try {
const payload = [{ oh: 'noes' }]
await makeClientStreamingRequest({ client, fnName: 'sayErrorClientStream', payload })
- } catch (err) {
+ } catch {
// err tested in client tests
}
@@ -200,7 +200,7 @@ test('should not record errors if `grpc.record_errors` is disabled', async (t) =
try {
const payload = [{ oh: 'noes' }]
await makeClientStreamingRequest({ client, fnName: 'sayErrorClientStream', payload })
- } catch (err) {
+ } catch {
// err tested in client tests
}
assert.ok(transaction, 'transaction exists')
@@ -234,7 +234,7 @@ test('should record errors if `grpc.record_errors` is enabled and server sends e
payload,
endStream: false
})
- } catch (err) {
+ } catch {
// err tested in client tests
}
assert.ok(transaction, 'transaction exists')
diff --git a/test/versioned/grpc/server-streaming.test.js b/test/versioned/grpc/server-streaming.test.js
index 394f8c5be5..86fe684a9d 100644
--- a/test/versioned/grpc/server-streaming.test.js
+++ b/test/versioned/grpc/server-streaming.test.js
@@ -165,7 +165,7 @@ for (const config of grpcConfigs) {
try {
const payload = { name: ['noes'] }
await makeServerStreamingRequest({ client, fnName: 'sayErrorServerStream', payload })
- } catch (err) {
+ } catch {
// err tested in client tests
}
diff --git a/test/versioned/grpc/server-unary.test.js b/test/versioned/grpc/server-unary.test.js
index 8e6a7a8798..e1fa2f403d 100644
--- a/test/versioned/grpc/server-unary.test.js
+++ b/test/versioned/grpc/server-unary.test.js
@@ -176,7 +176,7 @@ for (const config of grpcConfigs) {
fnName: 'sayError',
payload: { oh: 'noes' }
})
- } catch (err) {
+ } catch {
// err tested in client tests
}
diff --git a/test/versioned/grpc/util.cjs b/test/versioned/grpc/util.cjs
index 2ae0864095..0e8205d0ec 100644
--- a/test/versioned/grpc/util.cjs
+++ b/test/versioned/grpc/util.cjs
@@ -5,6 +5,8 @@
'use strict'
+const path = require('node:path')
+
const util = module.exports
const metricsHelpers = require('../../lib/metrics_helper')
const protoLoader = require('@grpc/proto-loader')
@@ -36,6 +38,9 @@ function buildExpectedMetrics(port) {
*
* @param {Object} params
* @param {Object} params.agent test agent
+ * @param params.port
+ * @param root1
+ * @param root1.assert
*/
util.assertMetricsNotExisting = function assertMetricsNotExisting(
{ agent, port },
@@ -55,7 +60,7 @@ util.assertMetricsNotExisting = function assertMetricsNotExisting(
* @returns {Object} helloworld protobuf pkg
*/
function loadProtobufApi(grpc) {
- const PROTO_PATH = `${__dirname}/example.proto`
+ const PROTO_PATH = path.join(__dirname, 'example.proto')
const packageDefinition = protoLoader.loadSync(PROTO_PATH, {
keepCase: true,
longs: String,
@@ -99,6 +104,7 @@ util.createServer = async function createServer(grpc) {
*
* @param {Object} grpc grpc module
* @param {Object} proto protobuf API example.proto
+ * @param port
* @returns {Object} client grpc client for Greeter service
*/
util.getClient = function getClient(grpc, proto, port) {
@@ -133,8 +139,11 @@ util.getServerTransactionName = function getRPCName(fnName) {
* @param {Object} params
* @param {Object} params.tx transaction under test
* @param {string} params.fnName gRPC method name
- * @param {number} [params.expectedStatusCode=0] expected status code for test
- * @param {string} [params.expectedStatusText=OK] expected status text for test
+ * @param {number} [params.expectedStatusCode] expected status code for test
+ * @param {string} [params.expectedStatusText] expected status text for test
+ * @param params.port
+ * @param root1
+ * @param root1.assert
*/
util.assertExternalSegment = function assertExternalSegment(
{ tx, fnName, expectedStatusCode = 0, expectedStatusText = 'OK', port },
@@ -173,7 +182,10 @@ util.assertExternalSegment = function assertExternalSegment(
* @param {Object} params
* @param {Object} params.tx transaction under test
* @param {string} params.fnName gRPC method name
- * @param {number} [params.expectedStatusCode=0] expected status code for test
+ * @param {number} [params.expectedStatusCode] expected status code for test
+ * @param params.transaction
+ * @param root1
+ * @param root1.assert
*/
util.assertServerTransaction = function assertServerTransaction(
{ transaction, fnName, expectedStatusCode = 0 },
@@ -225,7 +237,7 @@ util.assertDistributedTracing = function assertDistributedTracing(
clientTransaction.id !== serverTransaction.id,
'should get different transactions for client and server'
)
- match(serverAttributes['request.headers.traceparent'], /^[\w\d\-]{55}$/, { assert })
+ match(serverAttributes['request.headers.traceparent'], /^[\w-]{55}$/, { assert })
assert.equal(serverAttributes['request.headers.newrelic'], '', 'should have the newrelic header')
assert.equal(
clientTransaction.traceId,
@@ -262,6 +274,7 @@ util.makeUnaryRequest = function makeUnaryRequest({ client, fnName, payload }) {
* @param {Object} params.client gRPC client
* @param {string} params.fnName gRPC method name
* @param {*} params.payload payload to gRPC method
+ * @param params.endStream
* @returns {Promise}
*/
util.makeClientStreamingRequest = function makeClientStreamingRequest({
@@ -350,12 +363,15 @@ util.makeBidiStreamingRequest = function makeBidiStreamingRequest({ client, fnNa
* @param {Object} params
* @param {Object} params.transaction transaction under test
* @param {Array} params.errors agent errors array
- * @param {boolean} [params.expectErrors=true] flag to indicate if errors will exist
- * @param {boolean} [params.clientError=false] flag to indicate if error is client side
+ * @param {boolean} [params.expectErrors] flag to indicate if errors will exist
+ * @param {boolean} [params.clientError] flag to indicate if error is client side
* @param {Array} params.agentMetrics agent metrics array
* @param {string} params.fnName gRPC method name
* @param {number} params.expectedStatusCode expected status code for test
* @param {string} params.expectedStatusText expected status text for test
+ * @param params.port
+ * @param root1
+ * @param root1.assert
*/
util.assertError = function assertError(
{
@@ -371,8 +387,16 @@ util.assertError = function assertError(
},
{ assert = require('node:assert') } = {}
) {
- // when testing client the transaction will contain both server and client information. so we need to extract the client error which is always the 2nd
- const errorLength = expectErrors ? (clientError ? 2 : 1) : 0
+ // when testing client the transaction will contain both server and client
+ // information. so we need to extract the client error which is always the 2nd
+ let errorLength = 0
+ if (expectErrors) {
+ if (clientError) {
+ errorLength = 2
+ } else {
+ errorLength = 1
+ }
+ }
assert.equal(errors.traceAggregator.errors.length, errorLength, `should be ${errorLength} errors`)
diff --git a/test/versioned/hapi/hapi.test.js b/test/versioned/hapi/hapi.test.js
index 0270c0cacd..de8f82c4c5 100644
--- a/test/versioned/hapi/hapi.test.js
+++ b/test/versioned/hapi/hapi.test.js
@@ -29,14 +29,14 @@ test('preserves server creation return', (t) => {
const { agent } = t.nr
const hapi = require('@hapi/hapi')
- const returned = utils.getServer({ hapi: hapi })
+ const returned = utils.getServer({ hapi })
assert.ok(returned != null, 'Hapi returns from server creation')
const shim = new shims.WebFrameworkShim(agent, 'hapi')
instrument(agent, hapi, 'hapi', shim)
- const returned2 = utils.getServer({ hapi: hapi })
+ const returned2 = utils.getServer({ hapi })
assert.ok(returned2 != null, 'Server creation returns when instrumented')
})
diff --git a/test/versioned/hapi/render.test.js b/test/versioned/hapi/render.test.js
index cbe3984c43..592ac1731c 100644
--- a/test/versioned/hapi/render.test.js
+++ b/test/versioned/hapi/render.test.js
@@ -152,7 +152,7 @@ test('should generate rum headers', { timeout: 1000 }, (t, end) => {
handler: function (req, h) {
const rum = api.getBrowserTimingHeader()
assert.equal(rum.substring(0, 7), '