[monitor] Upgrade @azure/monitor-ingestion to ESM/vitest (#31615)

### Packages impacted by this PR

- @azure/monitor-ingestion

### Issues associated with this PR

- https://github.com/Azure/azure-sdk-for-js/issues/31338


### Describe the problem that is addressed by this PR

Migrates @azure/monitor-ingestion to ESM (tshy-based builds) and vitest, replacing the previous karma/mocha/chai test setup.
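
The bulk of the diff is mechanical (explicit `.js` import extensions, tshy `exports`, new vitest configs). For reference, the test files move from mocha's `Context` and chai to vitest's test context and bundled assertions, roughly as in this sketch (recorder options are illustrative, not taken from the PR):

```ts
// Minimal sketch of the mocha/chai -> vitest pattern applied in this PR.
// Mirrors the shape of test/public/logsIngestionClient.spec.ts; recorder options are illustrative.
import { describe, it, assert, beforeEach, afterEach } from "vitest";
import { Recorder } from "@azure-tools/test-recorder";

describe("LogsIngestionClient live tests", () => {
  let recorder: Recorder;

  beforeEach(async (ctx) => {
    // vitest passes the test context directly, replacing mocha's `this.currentTest`.
    recorder = new Recorder(ctx);
    await recorder.start({ envSetupForPlayback: {} });
  });

  afterEach(async () => {
    await recorder.stop();
  });

  it("uploads logs", async () => {
    // ...test body, asserting with vitest's re-exported chai `assert`.
    assert.isDefined(recorder);
  });
});
```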

### What are the possible designs available to address the problem? If there is more than one possible design, why was the one in this PR chosen?


### Are there test cases added in this PR? _(If not, why?)_


### Provide a list of related PRs _(if any)_


### Command used to generate this PR: _(Applicable only to SDK release request PRs)_

### Checklists
- [ ] Added impacted package name to the issue description
- [ ] Does this PR need any fixes in the SDK Generator? _(If so, create an Issue in the [Autorest/typescript](https://github.com/Azure/autorest.typescript) repository and link it here)_
- [ ] Added a changelog (if necessary)
This commit is contained in:
Matthew Podwysocki 2024-11-05 17:44:15 -05:00 committed by GitHub
Parent 84896d1c3c
Commit 14051308f6
No key found matching this signature
GPG key ID: B5690EEEBB952194
28 changed files: 203 additions and 278 deletions

View file

@ -21352,7 +21352,7 @@ packages:
dev: false
file:projects/monitor-ingestion.tgz:
resolution: {integrity: sha512-cRuHz8yQdwmhQN1s3AiWPL30Osvv2lQIkF3ojq1QIMkrijM8Ap5Gb7dYY7LTdcpOodiOsI3VZ70LrYWqGOGbnQ==, tarball: file:projects/monitor-ingestion.tgz}
resolution: {integrity: sha512-pllRWLfOMD+ngWvcILVdzLi1f1XtozTseyFDESOKh67IT3MbSBExuav0w6vQOk2H5E88cpYU8mh0c/kFn2/Iwg==, tarball: file:projects/monitor-ingestion.tgz}
name: '@rush-temp/monitor-ingestion'
version: 0.0.0
dependencies:
@ -21363,6 +21363,8 @@ packages:
'@types/node': 18.19.64
'@types/pako': 2.0.3
'@types/sinon': 17.0.3
'@vitest/browser': 2.1.4(@types/node@18.19.64)(playwright@1.48.2)(typescript@5.6.3)(vitest@2.1.4)
'@vitest/coverage-istanbul': 2.1.4(vitest@2.1.4)
chai: 4.3.10
dotenv: 16.4.5
eslint: 9.14.0
@ -21380,20 +21382,34 @@ packages:
mocha: 10.8.2
nyc: 17.1.0
pako: 2.1.0
playwright: 1.48.2
sinon: 17.0.1
source-map-support: 0.5.21
ts-node: 10.9.2(@types/node@18.19.64)(typescript@5.6.3)
tslib: 2.8.1
typescript: 5.6.3
util: 0.12.5
vitest: 2.1.4(@types/node@18.19.64)(@vitest/browser@2.1.4)
transitivePeerDependencies:
- '@swc/core'
- '@swc/wasm'
- '@edge-runtime/vm'
- '@vitest/ui'
- bufferutil
- debug
- happy-dom
- jiti
- jsdom
- less
- lightningcss
- msw
- safaridriver
- sass
- sass-embedded
- stylus
- sugarss
- supports-color
- terser
- utf-8-validate
- vite
- webdriverio
dev: false
file:projects/monitor-opentelemetry-exporter.tgz:

View file

@ -1,19 +0,0 @@
{
"include": [
"dist-esm/src/**/*.js"
],
"exclude": [
"**/*.d.ts",
"dist-esm/src/generated/*"
],
"reporter": [
"text-summary",
"html",
"cobertura"
],
"exclude-after-remap": false,
"sourceMap": true,
"produce-source-map": true,
"instrument": true,
"all": true
}

View file

@ -1,6 +1,6 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
"mainEntryPointFilePath": "types/src/index.d.ts",
"mainEntryPointFilePath": "dist/esm/index.d.ts",
"docModel": {
"enabled": true
},
@ -11,7 +11,7 @@
"dtsRollup": {
"enabled": true,
"untrimmedFilePath": "",
"publicTrimmedFilePath": "./types/latest/monitor-ingestion.d.ts"
"publicTrimmedFilePath": "dist/monitor-ingestion.d.ts"
},
"messages": {
"tsdocMessageReporting": {

View file

@ -1,122 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// https://github.com/karma-runner/karma-chrome-launcher
process.env.CHROME_BIN = require("puppeteer").executablePath();
require("dotenv").config();
const { relativeRecordingsPath } = require("@azure-tools/test-recorder");
process.env.RECORDINGS_RELATIVE_PATH = relativeRecordingsPath();
module.exports = function (config) {
config.set({
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: "./",
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ["mocha"],
plugins: [
"karma-mocha",
"karma-mocha-reporter",
"karma-chrome-launcher",
"karma-firefox-launcher",
"karma-env-preprocessor",
"karma-coverage",
"karma-junit-reporter",
],
// list of files / patterns to load in the browser
files: [
"dist-test/index.browser.js",
{ pattern: "dist-test/index.browser.js.map", type: "html", included: false, served: true },
],
// list of files / patterns to exclude
exclude: [],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
"**/*.js": ["env"],
// IMPORTANT: COMMENT following line if you want to debug in your browsers!!
// Preprocess source file to calculate code coverage, however this will make source file unreadable
//"dist-test/index.browser.js": ["coverage"]
},
envPreprocessor: [
"TEST_MODE",
"LOGS_INGESTION_ENDPOINT",
"DATA_COLLECTION_RULE_ID",
"AZURE_CLIENT_ID",
"AZURE_CLIENT_SECRET",
"AZURE_TENANT_ID",
"RECORDINGS_RELATIVE_PATH",
],
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ["mocha", "coverage", "junit"],
coverageReporter: {
// specify a common output directory
dir: "coverage-browser/",
reporters: [{ type: "cobertura", subdir: ".", file: "cobertura-coverage.xml" }],
},
junitReporter: {
outputDir: "", // results will be saved as $outputDir/$browserName.xml
outputFile: "test-results.browser.xml", // if included, results will be saved as $outputDir/$browserName/$outputFile
suite: "", // suite will become the package name attribute in xml testsuite element
useBrowserName: false, // add browser name to report and classes names
nameFormatter: undefined, // function (browser, result) to customize the name attribute in xml testcase element
classNameFormatter: undefined, // function (browser, result) to customize the classname attribute in xml testcase element
properties: {}, // key value pair of properties to add to the <properties> section of the report
},
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
// --no-sandbox allows our tests to run in Linux without having to change the system.
// --disable-web-security allows us to authenticate from the browser without having to write tests using interactive auth, which would be far more complex.
browsers: ["ChromeHeadlessNoSandbox"],
customLaunchers: {
ChromeHeadlessNoSandbox: {
base: "ChromeHeadless",
flags: ["--no-sandbox", "--disable-web-security"],
},
},
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: true,
// Concurrency level
// how many browser should be started simultaneous
concurrency: 1,
browserNoActivityTimeout: 600000,
browserDisconnectTimeout: 10000,
browserDisconnectTolerance: 3,
client: {
mocha: {
// change Karma's debug.html to the mocha web reporter
reporter: "html",
timeout: "600000",
},
},
});
};

View file

@ -3,15 +3,9 @@
"version": "1.1.1",
"description": "Azure Monitor Ingestion library",
"sdk-type": "client",
"main": "dist/index.js",
"module": "dist-esm/src/index.js",
"browser": {
"./dist-esm/src/gZippingPolicy.js": "./dist-esm/src/gZippingPolicy.browser.js",
"./dist-esm/src/utils/getBinarySize.js": "./dist-esm/src/utils/getBinarySize.browser.js"
},
"react-native": {
"./dist-esm/src/gZippingPolicy.js": "./dist-esm/src/gZippingPolicy.browser.js"
},
"main": "./dist/commonjs/index.js",
"module": "./dist/esm/index.js",
"browser": "./dist/browser/index.js",
"//metadata": {
"constantPaths": [
{
@ -28,38 +22,36 @@
}
]
},
"types": "types/latest/monitor-ingestion.d.ts",
"types": "./dist/commonjs/index.d.ts",
"scripts": {
"build": "npm run clean && tsc -p . && npm run build:nodebrowser && dev-tool run extract-api",
"build:browser": "tsc -p . && dev-tool run bundle",
"build:node": "tsc -p . && dev-tool run bundle",
"build": "npm run clean && dev-tool run build-package && dev-tool run extract-api",
"build:browser": "dev-tool run build-package && dev-tool run bundle",
"build:node": "dev-tool run build-package && dev-tool run bundle",
"build:nodebrowser": "dev-tool run bundle",
"build:samples": "dev-tool samples run samples-dev",
"build:test": "tsc -p . && dev-tool run bundle",
"build:test": "dev-tool run build-package && dev-tool run bundle",
"check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"",
"clean": "dev-tool run vendored rimraf --glob dist dist-* temp types *.tgz *.log",
"execute:samples": "echo Obsolete",
"extract-api": "tsc -p . && dev-tool run extract-api",
"extract-api": "dev-tool run build-package && dev-tool run extract-api",
"format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"",
"generate:client": "autorest --typescript ./swagger/README.md",
"integration-test": "npm run integration-test:node && npm run integration-test:browser",
"integration-test:browser": "dev-tool run test:browser",
"integration-test:node": "dev-tool run test:node-ts-input -- --timeout 1200000 'test/**/*.spec.ts'",
"integration-test:browser": "npm run clean && dev-tool run build-package && dev-tool run build-test && dev-tool run test:vitest --browser",
"integration-test:node": "dev-tool run test:vitest",
"lint": "eslint package.json api-extractor.json src test",
"lint:fix": "eslint package.json api-extractor.json src test --fix --fix-type [problem,suggestion]",
"pack": "npm pack 2>&1",
"test": "npm run clean && tsc -p . && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test",
"test": "npm run clean && dev-tool run build-package && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test",
"test:browser": "npm run clean && npm run build:test && npm run integration-test:browser",
"test:node": "npm run clean && tsc -p . && npm run integration-test:node",
"test:node": "npm run clean && dev-tool run build-package && npm run integration-test:node",
"unit-test": "npm run unit-test:node && npm run unit-test:browser",
"unit-test:browser": "npm run integration-test:browser",
"unit-test:node": "npm run integration-test:node",
"unit-test:browser": "npm run clean && dev-tool run build-package && dev-tool run build-test && dev-tool run test:vitest --browser",
"unit-test:node": "dev-tool run test:vitest",
"update-snippets": "echo skipped"
},
"files": [
"dist/",
"dist-esm/src/",
"types/latest/",
"README.md",
"LICENSE"
],
@ -92,39 +84,22 @@
"tslib": "^2.2.0"
},
"devDependencies": {
"@azure-tools/test-credential": "^1.0.4",
"@azure-tools/test-recorder": "^3.0.0",
"@azure-tools/test-utils": "^1.0.1",
"@azure-tools/test-credential": "^2.0.0",
"@azure-tools/test-recorder": "^4.1.0",
"@azure-tools/test-utils-vitest": "^1.0.0",
"@azure/dev-tool": "^1.0.0",
"@azure/eslint-plugin-azure-sdk": "^3.0.0",
"@azure/identity": "^4.0.1",
"@azure/monitor-query": "^1.2.0-beta.3",
"@types/chai": "^4.1.6",
"@types/mocha": "^10.0.0",
"@azure/identity": "^4.5.0",
"@azure/monitor-query": "^1.3.1",
"@types/node": "^18.0.0",
"@types/pako": "^2.0.0",
"@types/sinon": "^17.0.0",
"chai": "^4.2.0",
"@vitest/browser": "^2.1.4",
"@vitest/coverage-istanbul": "^2.1.4",
"dotenv": "^16.0.0",
"eslint": "^9.9.0",
"inherits": "^2.0.3",
"karma": "^6.2.0",
"karma-chrome-launcher": "^3.0.0",
"karma-coverage": "^2.0.0",
"karma-env-preprocessor": "^0.1.1",
"karma-firefox-launcher": "^1.1.0",
"karma-json-preprocessor": "^0.3.3",
"karma-json-to-file-reporter": "^1.0.1",
"karma-junit-reporter": "^2.0.1",
"karma-mocha": "^2.0.1",
"karma-mocha-reporter": "^2.2.5",
"mocha": "^10.0.0",
"nyc": "^17.0.0",
"sinon": "^17.0.0",
"source-map-support": "^0.5.9",
"ts-node": "^10.0.0",
"playwright": "^1.48.2",
"typescript": "~5.6.2",
"util": "^0.12.1"
"vitest": "^2.1.4"
},
"//sampleConfiguration": {
"skipFolder": false,
@ -135,5 +110,42 @@
"requiredResources": {
"Azure Monitor": "https://docs.microsoft.com/azure/azure-monitor/"
}
},
"type": "module",
"tshy": {
"exports": {
"./package.json": "./package.json",
".": "./src/index.ts"
},
"dialects": [
"esm",
"commonjs"
],
"esmDialects": [
"browser",
"react-native"
],
"selfLink": false
},
"exports": {
"./package.json": "./package.json",
".": {
"browser": {
"types": "./dist/browser/index.d.ts",
"default": "./dist/browser/index.js"
},
"react-native": {
"types": "./dist/react-native/index.d.ts",
"default": "./dist/react-native/index.js"
},
"import": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.js"
},
"require": {
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.js"
}
}
}
}

View file

@ -8,10 +8,9 @@
import { DefaultAzureCredential } from "@azure/identity";
import { isAggregateLogsUploadError, LogsIngestionClient } from "@azure/monitor-ingestion";
import "dotenv/config";
require("dotenv").config();
async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const ruleId = process.env.DATA_COLLECTION_RULE_ID || "data_collection_rule_id";
const streamName = process.env.STREAM_NAME || "data_stream_name";
@ -34,7 +33,7 @@ async function main() {
await client.upload(ruleId, streamName, logs);
} catch (e) {
if (isAggregateLogsUploadError(e)) {
let aggregateErrors = e.errors;
const aggregateErrors = e.errors;
if (aggregateErrors.length > 0) {
console.log(
"Some logs have failed to complete ingestion. Number of error batches=",

View file

@ -10,21 +10,20 @@ import { DefaultAzureCredential } from "@azure/identity";
import {
isAggregateLogsUploadError,
LogsIngestionClient,
LogsUploadFailure,
type LogsUploadFailure,
} from "@azure/monitor-ingestion";
import "dotenv/config";
require("dotenv").config();
async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const streamName = process.env.STREAM_NAME || "data_stream_name";
const credential = new DefaultAzureCredential();
const client = new LogsIngestionClient(logsIngestionEndpoint, credential);
let abortController = new AbortController();
const abortController = new AbortController();
function errorCallback(uploadLogsError: LogsUploadFailure) {
function errorCallback(uploadLogsError: LogsUploadFailure): void {
if (
(uploadLogsError.cause as Error).message ===
uploadLogsError.cause.message ===
"Data collection rule with immutable Id 'immutable-id-123' not found."
) {
abortController.abort();
@ -49,7 +48,7 @@ async function main() {
});
} catch (e) {
if (isAggregateLogsUploadError(e)) {
let aggregateErrors = e.errors;
const aggregateErrors = e.errors;
if (aggregateErrors.length > 0) {
console.log(
"Some logs have failed to complete ingestion. Number of error batches=",

View file

@ -8,11 +8,9 @@
import { isAggregateLogsUploadError, LogsIngestionClient } from "@azure/monitor-ingestion";
import { DefaultAzureCredential } from "@azure/identity";
import "dotenv/config";
import * as dotenv from "dotenv";
dotenv.config();
export async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const ruleId = process.env.DATA_COLLECTION_RULE_ID || "immutable_dcr_id";
const streamName = process.env.STREAM_NAME || "stream_name";
@ -24,7 +22,7 @@ export async function main() {
});
console.log("All the logs provided are successfully ingested");
} catch (e) {
let aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
const aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
if (aggregateErrors.length > 0) {
console.log("Some logs have failed to complete ingestion");
for (const error of aggregateErrors) {

View file

@ -7,11 +7,9 @@
*/
import { DefaultAzureCredential } from "@azure/identity";
import { isAggregateLogsUploadError, LogsIngestionClient } from "@azure/monitor-ingestion";
import "dotenv/config";
import * as dotenv from "dotenv";
dotenv.config();
export async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const ruleId = process.env.DATA_COLLECTION_RULE_ID || "data_collection_rule_id";
const streamName = process.env.STREAM_NAME || "data_stream_name";
@ -32,7 +30,7 @@ export async function main() {
try {
await client.upload(ruleId, streamName, logs);
} catch (e) {
let aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
const aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
console.log(
"Some logs have failed to complete ingestion. Length of errors =",
aggregateErrors.length,

View file

@ -7,10 +7,9 @@
import { DefaultAzureCredential } from "@azure/identity";
import { isAggregateLogsUploadError, LogsIngestionClient } from "@azure/monitor-ingestion";
import "dotenv/config";
require("dotenv").config();
async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const ruleId = process.env.DATA_COLLECTION_RULE_ID || "data_collection_rule_id";
const streamName = process.env.STREAM_NAME || "data_stream_name";
@ -32,7 +31,7 @@ async function main() {
await client.upload(ruleId, streamName, logs, { maxConcurrency: 1 });
} catch (e) {
if (isAggregateLogsUploadError(e)) {
let aggregateErrors = e.errors;
const aggregateErrors = e.errors;
console.log(
"Some logs have failed to complete ingestion. Length of errors =",
aggregateErrors.length,

View file

@ -10,12 +10,11 @@ import { DefaultAzureCredential } from "@azure/identity";
import {
isAggregateLogsUploadError,
LogsIngestionClient,
LogsUploadFailure,
type LogsUploadFailure,
} from "@azure/monitor-ingestion";
import "dotenv/config";
require("dotenv").config();
async function main() {
async function main(): Promise<void> {
const logsIngestionEndpoint = process.env.LOGS_INGESTION_ENDPOINT || "logs_ingestion_endpoint";
const ruleId = process.env.DATA_COLLECTION_RULE_ID || "data_collection_rule_id";
const streamName = process.env.STREAM_NAME || "data_stream_name";
@ -31,10 +30,10 @@ async function main() {
});
}
let failedLogs: Record<string, unknown>[] = [];
async function errorCallback(uploadLogsError: LogsUploadFailure) {
const failedLogs: Record<string, unknown>[] = [];
function errorCallback(uploadLogsError: LogsUploadFailure): void {
if (
(uploadLogsError.cause as Error).message ===
uploadLogsError.cause.message ===
"Data collection rule with immutable Id 'immutable-id-123' not found."
) {
// track failed logs here
@ -49,7 +48,7 @@ async function main() {
onError: errorCallback,
});
} catch (e) {
let aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
const aggregateErrors = isAggregateLogsUploadError(e) ? e.errors : [];
if (aggregateErrors.length > 0) {
console.log(
"Some logs have failed to complete ingestion. Number of error batches=",
@ -69,7 +68,8 @@ async function main() {
await client.upload(ruleId, "Custom-MyTableRawData", failedLogs, {
maxConcurrency: 1,
});
} finally {
} catch {
// Do nothing
}
}
}

View file

@ -3,7 +3,7 @@
import type { PipelinePolicy } from "@azure/core-rest-pipeline";
import * as zlib from "zlib";
import { promisify } from "util";
import { promisify } from "node:util";
const gzip = promisify(zlib.gzip);
/**
@ -15,13 +15,13 @@ export const GZippingPolicy: PipelinePolicy = {
name: gZippingPolicyName,
sendRequest: async (req, next) => {
if (req.body) {
const buffer = await gzipping(req.body);
const buffer = await gzipping(req.body as string | ArrayBuffer | NodeJS.ArrayBufferView);
req.body = buffer;
}
return next(req);
},
};
function gzipping(body: any): Promise<Buffer> {
function gzipping(body: string | ArrayBuffer | NodeJS.ArrayBufferView): Promise<Buffer> {
return gzip(body);
}

View file

@ -8,13 +8,13 @@
import * as coreClient from "@azure/core-client";
import * as coreAuth from "@azure/core-auth";
import * as Parameters from "./models/parameters";
import * as Mappers from "./models/mappers";
import { GeneratedMonitorIngestionClientContext } from "./generatedMonitorIngestionClientContext";
import * as Parameters from "./models/parameters.js";
import * as Mappers from "./models/mappers.js";
import { GeneratedMonitorIngestionClientContext } from "./generatedMonitorIngestionClientContext.js";
import {
GeneratedMonitorIngestionClientOptionalParams,
UploadOptionalParams
} from "./models";
} from "./models/index.js";
/** @internal */
export class GeneratedMonitorIngestionClient extends GeneratedMonitorIngestionClientContext {

View file

@ -8,7 +8,7 @@
import * as coreClient from "@azure/core-client";
import * as coreAuth from "@azure/core-auth";
import { GeneratedMonitorIngestionClientOptionalParams } from "./models";
import { GeneratedMonitorIngestionClientOptionalParams } from "./models/index.js";
/** @internal */
export class GeneratedMonitorIngestionClientContext extends coreClient.ServiceClient {

sdk/monitor/monitor-ingestion/src/generated/index.ts (generated)
View file

@ -6,6 +6,6 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export * from "./models";
export { GeneratedMonitorIngestionClient } from "./generatedMonitorIngestionClient";
export { GeneratedMonitorIngestionClientContext } from "./generatedMonitorIngestionClientContext";
export * from "./models/index.js";
export { GeneratedMonitorIngestionClient } from "./generatedMonitorIngestionClient.js";
export { GeneratedMonitorIngestionClientContext } from "./generatedMonitorIngestionClientContext.js";

View file

@ -5,6 +5,6 @@
* This package is used for logs ingestion for the [Azure Monitor](https://docs.microsoft.com/azure/azure-monitor/overview) resource.
* @packageDocumentation
*/
export * from "./logsIngestionClient";
export * from "./models";
export { KnownMonitorAudience } from "./constants";
export * from "./logsIngestionClient.js";
export * from "./models.js";
export { KnownMonitorAudience } from "./constants.js";

View file

@ -3,14 +3,14 @@
import type { TokenCredential } from "@azure/core-auth";
import type { CommonClientOptions } from "@azure/core-client";
import { GeneratedMonitorIngestionClient } from "./generated";
import type { LogsUploadFailure, LogsUploadOptions } from "./models";
import { AggregateLogsUploadError } from "./models";
import { GZippingPolicy } from "./gZippingPolicy";
import { concurrentRun } from "./utils/concurrentPoolHelper";
import { splitDataToChunks } from "./utils/splitDataToChunksHelper";
import { GeneratedMonitorIngestionClient } from "./generated/index.js";
import type { LogsUploadFailure, LogsUploadOptions } from "./models.js";
import { AggregateLogsUploadError } from "./models.js";
import { GZippingPolicy } from "./gZippingPolicy.js";
import { concurrentRun } from "./utils/concurrentPoolHelper.js";
import { splitDataToChunks } from "./utils/splitDataToChunksHelper.js";
import { isError } from "@azure/core-util";
import { KnownMonitorAudience } from "./constants";
import { KnownMonitorAudience } from "./constants.js";
/**
* Options for Monitor Logs Ingestion Client
*/
@ -72,7 +72,7 @@ export class LogsIngestionClient {
ruleId: string,
streamName: string,
logs: Record<string, unknown>[],
// eslint-disable-next-line @azure/azure-sdk/ts-naming-options
options?: LogsUploadOptions,
): Promise<void> {
// TODO: Do we need to worry about memory issues when loading data for 100GB ?? JS max allocation is 1 or 2GB
@ -91,12 +91,15 @@ export class LogsIngestionClient {
contentEncoding: "gzip",
abortSignal: options?.abortSignal,
});
} catch (e: any) {
} catch (e: unknown) {
if (options?.onError) {
options.onError({ failedLogs: eachChunk, cause: isError(e) ? e : new Error(e) });
options.onError({
failedLogs: eachChunk,
cause: isError(e) ? e : new Error(e as string),
});
}
uploadResultErrors.push({
cause: isError(e) ? e : new Error(e),
cause: isError(e) ? e : new Error(e as string),
failedLogs: eachChunk,
});
}

View file

@ -13,14 +13,13 @@ export async function concurrentRun<T>(
const promises: Array<Promise<void>> = [];
function removePromise(p: Promise<void>): void {
promises.splice(promises.indexOf(p), 1);
void promises.splice(promises.indexOf(p), 1);
}
while (dataQueue.length) {
while (dataQueue.length && promises.length < maxConcurrency) {
const worker = dataQueue.pop();
const promise = callback(worker!);
// eslint-disable-next-line promise/catch-or-return
promise.finally(() => removePromise(promise));
void promise.finally(() => removePromise(promise));
promises.push(promise);
}
if (promises.length === maxConcurrency) {

View file

@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { getBinarySize } from "./getBinarySize";
import { getBinarySize } from "./getBinarySize.js";
/**
* @internal

View file

@ -1,8 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { splitDataToChunks } from "../../src/utils/splitDataToChunksHelper";
import { assert } from "chai";
import { splitDataToChunks } from "../../src/utils/splitDataToChunksHelper.js";
import { describe, it, assert } from "vitest";
describe("LogsIngestionClient unit tests", function () {
it("creates one chunk for single log record of 1MB size", () => {

View file

@ -1,20 +1,19 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import type { LogsUploadFailure } from "../../src";
import { isAggregateLogsUploadError, LogsIngestionClient } from "../../src";
import type { Context } from "mocha";
import { assert } from "chai";
import type { LogsUploadFailure } from "../../src/index.js";
import { isAggregateLogsUploadError, LogsIngestionClient } from "../../src/index.js";
import type { AdditionalPolicyConfig } from "@azure/core-client";
import type { RecorderAndLogsClient } from "./shared/testShared";
import type { RecorderAndLogsClient } from "./shared/testShared.js";
import {
createClientAndStartRecorder,
getDcrId,
getLogsIngestionEndpoint,
loggerForTest,
} from "./shared/testShared";
} from "./shared/testShared.js";
import { Recorder } from "@azure-tools/test-recorder";
import { createTestCredential } from "@azure-tools/test-credential";
import { describe, it, assert, beforeEach, afterEach } from "vitest";
function createFailedPolicies(failedInterval: { isFailed: boolean }): AdditionalPolicyConfig[] {
return [
@ -38,9 +37,9 @@ describe("LogsIngestionClient live tests", function () {
let recorder: Recorder;
let recordedClient: RecorderAndLogsClient;
let client: LogsIngestionClient;
beforeEach(async function (this: Context) {
beforeEach(async function (ctx) {
loggerForTest.verbose(`Recorder: starting...`);
recorder = new Recorder(this.currentTest);
recorder = new Recorder(ctx);
recordedClient = await createClientAndStartRecorder(recorder);
client = recordedClient.client;
});
@ -152,7 +151,7 @@ describe("LogsIngestionClient live tests", function () {
function errorCallback(uploadLogsError: LogsUploadFailure): void {
if (
(uploadLogsError.cause as Error).message ===
uploadLogsError.cause.message ===
"Data collection rule with immutable Id 'immutable-id-123' not found."
) {
++errorCallbackCount;
@ -237,7 +236,7 @@ export function getObjects(logsCount: number): LogData[] {
}
/**
* The data fields should match the column names exactly even with the
* captilization in order for the data to show up in the logs
* capitalization in order for the data to show up in the logs
*/
export type LogData = {
Time: Date;

View file

@ -1,14 +1,15 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { createTestCredential } from "@azure-tools/test-credential";
import type { Recorder, RecorderStartOptions } from "@azure-tools/test-recorder";
import { assertEnvironmentVariable, env } from "@azure-tools/test-recorder";
import { createClientLogger } from "@azure/logger";
import { LogsIngestionClient } from "../../../src";
import { LogsIngestionClient } from "../../../src/index.js";
import type { ExponentialRetryPolicyOptions } from "@azure/core-rest-pipeline";
import type { AdditionalPolicyConfig } from "@azure/core-client";
export const loggerForTest = createClientLogger("test");
const envSetupForPlayback: Record<string, string> = {
LOGS_INGESTION_ENDPOINT:
"https://thisurl-logsingestion-somethinglocation123abcrd.monitor.azure.com",

View file

@ -0,0 +1,10 @@
{
"extends": "./.tshy/build.json",
"include": ["./src/**/*.ts", "./src/**/*.mts", "./test/**/*.spec.ts", "./test/**/*.mts"],
"exclude": ["./test/**/node/**/*.ts", "./test/snippets.spec.ts"],
"compilerOptions": {
"outDir": "./dist-test/browser",
"rootDir": ".",
"skipLibCheck": true
}
}

View file

@ -1,12 +1,13 @@
{
"extends": "../../../tsconfig",
"compilerOptions": {
"outDir": "./dist-esm",
"declarationDir": "./types",
"paths": {
"@azure/monitor-ingestion": ["./src/index"]
},
"lib": ["DOM"]
"lib": ["DOM"],
"module": "NodeNext",
"moduleResolution": "NodeNext",
"rootDir": "."
},
"include": ["src/**/*.ts", "test/**/*.ts", "samples-dev/**/*.ts"]
"include": ["src/**/*.ts", "src/**/*.mts", "src/**/*.cts", "samples-dev/**/*.ts", "test/**/*.ts"]
}

View file

@ -0,0 +1,17 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { defineConfig, mergeConfig } from "vitest/config";
import viteConfig from "../../../vitest.browser.shared.config.ts";
export default mergeConfig(
viteConfig,
defineConfig({
test: {
include: [
"dist-test/browser/test/**/*.spec.js",
],
},
}),
);

View file

@ -0,0 +1,15 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { defineConfig, mergeConfig } from "vitest/config";
import viteConfig from "../../../vitest.shared.config.ts";
export default mergeConfig(
viteConfig,
defineConfig({
test: {
include: ["test/**/*.spec.ts"],
exclude: ["test/snippets.spec.ts"],
},
}),
);