Mirror of https://github.com/Azure/autorest.git
Sourcemap fixing (#4206)
Parent: d9ca0d8aaf
Commit: 9aa1f112c5

@@ -0,0 +1,11 @@
+{
+  "changes": [
+    {
+      "packageName": "@autorest/common",
+      "comment": "",
+      "type": "none"
+    }
+  ],
+  "packageName": "@autorest/common",
+  "email": "tiguerin@microsoft.com"
+}

@@ -0,0 +1,11 @@
+{
+  "changes": [
+    {
+      "packageName": "@autorest/core",
+      "comment": "**Fix** sourcemap for multiple plugins",
+      "type": "patch"
+    }
+  ],
+  "packageName": "@autorest/core",
+  "email": "tiguerin@microsoft.com"
+}

@@ -0,0 +1,11 @@
+{
+  "changes": [
+    {
+      "packageName": "@azure-tools/datastore",
+      "comment": "**Fix** Sourcemap computation",
+      "type": "patch"
+    }
+  ],
+  "packageName": "@azure-tools/datastore",
+  "email": "tiguerin@microsoft.com"
+}

@@ -0,0 +1,11 @@
+{
+  "changes": [
+    {
+      "packageName": "@azure-tools/oai2-to-oai3",
+      "comment": "",
+      "type": "none"
+    }
+  ],
+  "packageName": "@azure-tools/oai2-to-oai3",
+  "email": "tiguerin@microsoft.com"
+}

@@ -97,5 +97,8 @@ export async function processCodeModel(codeModel: DataHandle, sink: DataSink): P
     }
   }

-  return sink.WriteData("codeModel.yaml", StringifyAst(ast), ["fix-me"], undefined, mapping, [codeModel]);
+  return sink.writeData("codeModel.yaml", StringifyAst(ast), ["fix-me"], undefined, {
+    mappings: mapping,
+    mappingSources: [codeModel],
+  });
 }

@@ -7,14 +7,6 @@ import { createHash } from "crypto";

 const md5 = (content: any) => createHash("md5").update(JSON.stringify(content)).digest("hex");

-function encode(path: string) {
-  return Buffer.from(path).toString("base64");
-}
-
-function decode(str: string) {
-  return Buffer.from(str, "base64").toString("utf8");
-}
-
 let cacheFolder: string | undefined;
 async function getCacheFolder() {
   if (!cacheFolder) {

@@ -53,14 +45,7 @@ export async function readCache(key: string | undefined, sink: DataSink): Promis
   if (await isDirectory(folder)) {
     for (const each of await readdir(folder)) {
       const item = JSON.parse(await readFile(join(folder, each)));
-      const dh = await sink.WriteData(
-        item.key,
-        item.content,
-        item.identity,
-        item.artifactType,
-        undefined,
-        undefined,
-      );
+      const dh = await sink.writeData(item.key, item.content, item.identity, item.artifactType);
       handles.push(dh);
     }
   }

@@ -262,11 +262,11 @@ export class AutoRestExtension extends EventEmitter {
     const friendly2internal: (name: string) => Promise<string | undefined> = async (name) =>
       (
         (await inputFileHandles).filter(
-          (h) => h.Description === name || decodeURIComponent(h.Description) === decodeURIComponent(name),
+          (h) => h.description === name || decodeURIComponent(h.description) === decodeURIComponent(name),
         )[0] || {}
       ).key;
     const internal2friendly: (name: string) => Promise<string | undefined> = async (key) =>
-      ((await inputScope.Read(key)) || <any>{}).Description;
+      ((await inputScope.read(key)) || <any>{}).description;

     const writeFileToSinkAndNotify = async (
       filename: string,

@@ -281,7 +281,7 @@ export class AutoRestExtension extends EventEmitter {
       let handle: DataHandle;
       if (typeof (<any>sourceMap).mappings === "string") {
         onFile(
-          (handle = await sink.WriteDataWithSourceMap(
+          (handle = await sink.writeDataWithSourceMap(
             filename,
             content,
             artifactType,

@@ -291,20 +291,16 @@ export class AutoRestExtension extends EventEmitter {
         );
       } else {
         onFile(
-          (handle = await sink.WriteData(
-            filename,
-            content,
-            ["fix-me-here2"],
-            artifactType,
-            <Array<Mapping>>sourceMap,
-            await inputFileHandles,
-          )),
+          (handle = await sink.writeData(filename, content, ["fix-me-here2"], artifactType, {
+            mappings: sourceMap as any,
+            mappingSources: await inputFileHandles,
+          })),
         );
       }
       return {
         uri: handle.key,
         type: handle.artifactType,
-        content: await handle.ReadData(),
+        content: await handle.readData(),
       };
     };

@@ -315,12 +311,12 @@ export class AutoRestExtension extends EventEmitter {
       },
       async ReadFile(filename: string): Promise<string> {
         try {
-          const file = await inputScope.ReadStrict((await friendly2internal(filename)) || filename);
+          const file = await inputScope.readStrict((await friendly2internal(filename)) || filename);
           return await file.ReadData();
         } catch (E) {
           // try getting the file from the output-folder
           try {
-            const result = await context.fileSystem.ReadFile(`${context.config.outputFolderUri}${filename}`);
+            const result = await context.fileSystem.read(`${context.config.outputFolderUri}${filename}`);
             return result;
           } catch (E2) {
             // no file there!

@@ -362,7 +358,7 @@ export class AutoRestExtension extends EventEmitter {
         .filter((x) => {
           return typeof artifactType !== "string" || artifactType === x.artifactType;
         })
-        .map((x) => x.Description);
+        .map((x) => x.description);

       // if the request returned items, or they didn't specify a path/artifacttype
       if (inputs.length > 0 || artifactType === null || artifactType === undefined) {

@@ -372,9 +368,7 @@ export class AutoRestExtension extends EventEmitter {
       // we'd like to be able to ask the host for a file directly (but only if it's supposed to be in the output-folder)
       const t = context.config.outputFolderUri.length;
       return (
-        await context.fileSystem.EnumerateFileUris(
-          ensureIsFolderUri(`${context.config.outputFolderUri}${artifactType || ""}`),
-        )
+        await context.fileSystem.list(ensureIsFolderUri(`${context.config.outputFolderUri}${artifactType || ""}`))
       ).map((each) => each.substr(t));
     },

@@ -33,8 +33,8 @@ export class AllOfCleaner {
 }

 async function allofCleaner(config: AutorestContext, input: DataSource, sink: DataSink) {
-  const inputs = await Promise.all((await input.Enum()).map(async (x) => input.readStrict(x)));
-  const result: Array<DataHandle> = [];
+  const inputs = await Promise.all((await input.enum()).map(async (x) => input.readStrict(x)));
+  const result: DataHandle[] = [];

   for (const each of inputs) {
     const fixer = new AllOfCleaner(each);

@@ -11,7 +11,7 @@ export function createCommonmarkProcessorPlugin(): PipelinePlugin {
       const fileIn = await input.ReadStrict(file);
       const fileOut = await processCodeModel(fileIn, sink);
       file = file.substr(file.indexOf("/output/") + "/output/".length);
-      results.push(await sink.Forward("code-model-v1", fileOut));
+      results.push(await sink.forward("code-model-v1", fileOut));
     }
     return new QuickDataSource(results, input.pipeState);
   };

@@ -101,11 +101,15 @@ async function renameComponentsKeys(config: AutorestContext, input: DataSource,
   for (const each of inputs) {
     const processor = new ComponentKeyRenamer(each);
     result.push(
-      await sink.WriteObject(
+      await sink.writeObject(
         "oai3.component-renamed.json",
         await processor.getOutput(),
         each.identity,
         "openapi-document-renamed",
+        {
+          mappings: await processor.getSourceMappings(),
+          mappingSources: [each],
+        },
       ),
     );
   }

@@ -96,7 +96,7 @@ export function createComponentModifierPlugin(): PipelinePlugin {
       }
     }

-    return sink.WriteObject(fileIn.Description, o, fileIn.identity);
+    return sink.writeObject(fileIn.description, o, fileIn.identity);
   }
   return fileIn;
 });

@@ -239,7 +239,12 @@ async function clean(config: AutorestContext, input: DataSource, sink: DataSink)
   for (const each of inputs) {
     const processor = new ComponentsCleaner(each);
     const output = await processor.getOutput();
-    result.push(await sink.writeObject("oai3.cleaned.json", output, each.identity, "openapi-document-cleaned"));
+    result.push(
+      await sink.writeObject("oai3.cleaned.json", output, each.identity, "openapi-document-cleaned", {
+        mappings: await processor.getSourceMappings(),
+        mappingSources: [each],
+      }),
+    );
   }

   return new QuickDataSource(result, input.pipeState);

@@ -1,4 +1,4 @@
-import { DataHandle, IFileSystem, QuickDataSource } from "@azure-tools/datastore";
+import { Data, DataHandle, IFileSystem, QuickDataSource } from "@azure-tools/datastore";
 import { PipelinePlugin } from "../pipeline/common";
 import { convertOai2ToOai3Files } from "@azure-tools/oai2-to-oai3";
 import { clone } from "@azure-tools/linq";

@@ -6,20 +6,21 @@ import { clone } from "@azure-tools/linq";
 /* @internal */
 export function createSwaggerToOpenApi3Plugin(fileSystem?: IFileSystem): PipelinePlugin {
   return async (config, input, sink) => {
-    const files = await input.Enum();
-    const inputs: Array<DataHandle> = [];
+    const files = await input.enum();
+    const inputs: DataHandle[] = [];
     for (const file of files) {
       inputs.push(await input.readStrict(file));
     }
     const results = await convertOai2ToOai3Files(inputs);
-    const resultHandles: Array<DataHandle> = [];
-    for (const { result, name } of results) {
+    const resultHandles: DataHandle[] = [];
+    for (const { result, name, mappings } of results) {
       const input = inputs.find((x) => x.originalFullPath === name);
       if (input === undefined) {
         throw new Error(`Unexpected error while trying to map output of file ${name}. It cannot be found as an input.`);
       }
-      const out = await sink.writeObject("OpenAPI", clone(result), input.identity);
-      resultHandles.push(await sink.Forward(input.description, out));
+      // TODO-TIM , mappings, inputs
+      const out = await sink.writeObject("OpenAPI", clone(result), input.identity, undefined);
+      resultHandles.push(await sink.forward(input.description, out));
     }
     return new QuickDataSource(resultHandles, input.pipeState);
   };

@@ -16,7 +16,7 @@ async function deduplicate(context: AutorestContext, input: DataSource, sink: Da
   const idm = !!context.config["deduplicate-inline-models"];

   for (const each of values(inputs).where((input) => input.artifactType !== "profile-filter-log")) {
-    const model = <any>await each.ReadObject();
+    const model = <any>await each.readObject();

     /*
       Disabling for now -- not sure if we need to skip this in the simple case anyway.

@@ -31,26 +31,18 @@ async function deduplicate(context: AutorestContext, input: DataSource, sink: Da
     // skip if it's already marked that it was done.
     if (model.info?.["x-ms-metadata"]?.deduplicated) {
       result.push(
-        await sink.WriteObject(
-          "oai3.model-deduplicated.json",
-          model,
-          each.identity,
-          "openapi-document-deduplicated",
-          [],
-        ),
+        await sink.writeObject("oai3.model-deduplicated.json", model, each.identity, "openapi-document-deduplicated"),
       );
       continue;
     }
     const deduplicator = new Deduplicator(model, idm);
     result.push(
-      await sink.WriteObject(
+      await sink.writeObject(
         "oai3.model-deduplicated.json",
         await deduplicator.getOutput(),
         each.identity,
         "openapi-document-deduplicated",
-        [
-          /* fix-me: Construct source map from the mappings returned by the deduplicator.s*/
-        ],
+        /* fix-me: Construct source map from the mappings returned by the deduplicator.s*/
       ),
     );
   }

@@ -1,5 +1,5 @@
 import { DataHandle, DataSource, Normalize, QuickDataSource, createSandbox, Stringify } from "@azure-tools/datastore";
-import { ResolveUri } from "@azure-tools/uri";
+import { resolveUri } from "@azure-tools/uri";
 import { AutorestContext } from "../context";
 import { Channel } from "../message";
 import { PipelinePlugin } from "../pipeline/common";

@@ -57,46 +57,38 @@ async function emitArtifact(
   const sink = config.DataStore.getDataSink();

   if (isOutputArtifactOrMapRequested(config, artifactType + ".yaml")) {
-    const h = await sink.WriteData(
+    const h = await sink.writeData(
       `${++emitCtr}.yaml`,
-      Stringify(await handle.ReadObject<any>()),
+      Stringify(await handle.readObject<any>()),
       ["fix-me"],
       artifactType,
-      [] /*disabled source maps long ago */,
-      [handle],
     );
     await emitArtifactInternal(config, artifactType + ".yaml", uri + ".yaml", h);
   }
   if (isOutputArtifactOrMapRequested(config, artifactType + ".norm.yaml")) {
-    const h = await sink.WriteData(
+    const h = await sink.writeData(
       `${++emitCtr}.norm.yaml`,
-      Stringify(Normalize(await handle.ReadObject<any>())),
+      Stringify(Normalize(await handle.readObject<any>())),
       ["fix-me"],
       artifactType,
-      [] /*disabled source maps long ago */,
-      [handle],
     );
     await emitArtifactInternal(config, artifactType + ".norm.yaml", uri + ".norm.yaml", h);
   }
   if (isOutputArtifactOrMapRequested(config, artifactType + ".json")) {
-    const h = await sink.WriteData(
+    const h = await sink.writeData(
       `${++emitCtr}.json`,
-      JSON.stringify(await handle.ReadObject<any>(), null, 2),
+      JSON.stringify(await handle.readObject<any>(), null, 2),
       ["fix-me"],
       artifactType,
-      [] /*disabled source maps long ago */,
-      [handle],
     );
     await emitArtifactInternal(config, artifactType + ".json", uri + ".json", h);
   }
   if (isOutputArtifactOrMapRequested(config, artifactType + ".norm.json")) {
-    const h = await sink.WriteData(
+    const h = await sink.writeData(
       `${++emitCtr}.norm.json`,
-      JSON.stringify(Normalize(await handle.ReadObject<any>()), null, 2),
+      JSON.stringify(Normalize(await handle.readObject<any>()), null, 2),
       ["fix-me"],
       artifactType,
-      [] /*disabled source maps long ago */,
-      [handle],
     );
     await emitArtifactInternal(config, artifactType + ".norm.json", uri + ".norm.json", h);
   }

@@ -112,8 +104,8 @@ export async function emitArtifacts(
   isObject: boolean,
 ): Promise<void> {
   const all = new Array<Promise<void>>();
-  for (const key of await scope.Enum()) {
-    const file = await scope.ReadStrict(key);
+  for (const key of await scope.enum()) {
+    const file = await scope.readStrict(key);
     const fileArtifact = file.artifactType;
     const ok = artifactTypeFilter
       ? typeof artifactTypeFilter === "string"

@@ -124,7 +116,7 @@ export async function emitArtifacts(
       : true; // if it's null, just emit it.

     if (ok) {
-      all.push(emitArtifact(config, uriResolver(file.Description), file, isObject));
+      all.push(emitArtifact(config, uriResolver(file.description), file, isObject));
     }
   }
   await Promise.all(all);

@@ -148,7 +140,7 @@ export function createArtifactEmitterPlugin(
       context,
       context.GetEntry("input-artifact") || null,
       (key) =>
-        ResolveUri(
+        resolveUri(
           context.config.outputFolderUri,
           safeEval<string>(context.GetEntry("output-uri-expr") || "$key", {
             $key: key,

@@ -14,6 +14,10 @@ async function deduplicateEnums(config: AutorestContext, input: DataSource, sink
         await deduplicator.getOutput(),
         each.identity,
         "openapi-document-enum-deduplicated",
+        {
+          mappings: await deduplicator.getSourceMappings(),
+          mappingSources: [each],
+        },
       ),
     );
   }

@@ -57,35 +57,35 @@ function resolveNewIdentity(dataHandles: DataHandle[]): Map<string, string> {
   const map = new Map<string, string>();

   if (dataHandles.length === 1) {
-    const name = dataHandles[0].Description;
+    const name = dataHandles[0].description;
     return new Map([[name, basename(name)]]);
   }

-  const root = resolveCommonRoot(dataHandles.map((x) => x.Description));
+  const root = resolveCommonRoot(dataHandles.map((x) => x.description));
   for (const data of dataHandles) {
-    if (!data.Description.startsWith(root)) {
-      throw new Error(`Unexpected error: '${data.Description}' does not start with '${root}'`);
+    if (!data.description.startsWith(root)) {
+      throw new Error(`Unexpected error: '${data.description}' does not start with '${root}'`);
     }
-    map.set(data.Description, data.Description.substring(root.length));
+    map.set(data.description, data.description.substring(root.length));
   }

   return map;
 }

 async function normalizeIdentity(context: AutorestContext, input: DataSource, sink: DataSink) {
-  const inputs = await Promise.all((await input.Enum()).map((x) => input.ReadStrict(x)));
+  const inputs = await Promise.all((await input.enum()).map((x) => input.readStrict(x)));
   const identityMap = resolveNewIdentity(inputs);

   const results = await Promise.all(
     inputs.map(async (input) => {
-      const data = cloneDeep(await input.ReadObject());
-      const newName = identityMap.get(input.Description);
+      const data = cloneDeep(await input.readObject());
+      const newName = identityMap.get(input.description);
       if (!newName) {
-        throw new Error(`Unexpected error. Couldn't find mapping for data handle ${input.Description}`);
+        throw new Error(`Unexpected error. Couldn't find mapping for data handle ${input.description}`);
       }
       updateFileRefs(data, newName, identityMap);

-      return await sink.WriteData(newName, JSON.stringify(data, null, 2), input.identity, context.config.to);
+      return await sink.writeData(newName, JSON.stringify(data, null, 2), input.identity, context.config.to);
     }),
   );

@@ -1,6 +1,6 @@
 import { PipelinePlugin } from "../../pipeline/common";
 import { AutorestContext } from "../../context";
-import { DataSource, DataSink, DataHandle, QuickDataSource } from "@azure-tools/datastore";
+import { DataSource, DataSink, QuickDataSource } from "@azure-tools/datastore";
 import { uniqBy } from "lodash";

 /**

@@ -18,15 +18,15 @@ function insertIndexSuffix(name: string, suffix: number): string {
 }

 async function resetIdentity(context: AutorestContext, input: DataSource, sink: DataSink) {
-  const inputs = await Promise.all((await input.Enum()).map((x) => input.ReadStrict(x)));
-  const numberEachFile = inputs.length > 1 && uniqBy(inputs, (each) => each.Description);
+  const inputs = await Promise.all((await input.Enum()).map((x) => input.readStrict(x)));
+  const numberEachFile = inputs.length > 1 && uniqBy(inputs, (each) => each.description);
   const result = await Promise.all(
     inputs.map(async (input, index) => {
-      let name = `${context.config.name || input.Description}`;
+      let name = `${context.config.name || input.description}`;
       if (numberEachFile) {
         name = insertIndexSuffix(name, index);
       }
-      return await sink.WriteData(name, await input.ReadData(), input.identity, context.config.to);
+      return await sink.writeData(name, await input.readData(), input.identity, context.config.to);
     }),
   );
   return new QuickDataSource(result, input.pipeState);

@@ -43,7 +43,7 @@ export async function loadOpenAPIFile(
     // TODO: Should we throw or send an error message?
   }
   config.Message({ Channel: Channel.Verbose, Text: `Reading OpenAPI 3.0 file ${inputFileUri}` });
-  return sink.writeData(handle.description, await handle.readData(), [inputFileUri], "openapi-document", [], [handle]);
+  return sink.writeData(handle.description, await handle.readData(), [inputFileUri], "openapi-document");
 }

 /**

@@ -36,7 +36,7 @@ export async function LoadLiterateSwagger(
   }
   config.Message({ Channel: Channel.Verbose, Text: `Reading OpenAPI 2.0 file ${inputFileUri}` });

-  return sink.writeData(data.description, await data.readData(), [inputFileUri], "swagger-document", [], [data]);
+  return sink.writeData(data.description, await data.readData(), [inputFileUri], "swagger-document");
 }

 export function createSwaggerLoaderPlugin(): PipelinePlugin {

@@ -342,7 +342,7 @@ export class MultiAPIMerger extends Transformer<any, oai.Model> {
         url: hosts[0],
         description: "Default server",
       },
       pointer: `/servers/0`,
       recurse: false,
     });
   }

@@ -538,6 +538,10 @@ async function merge(context: AutorestContext, input: DataSource, sink: DataSink
         // eslint-disable-next-line prefer-spread
         [].concat.apply([], <any>inputs.map((each) => each.identity)),
         "merged-oai3",
+        {
+          mappings: await processor.getSourceMappings(),
+          mappingSources: inputs,
+        },
       ),
     ],
     input.pipeState,

@@ -42,7 +42,7 @@ export function createCSharpReflectApiVersionPlugin(): PipelinePlugin {
       tuples = tuples.filter((x, i) => i === 0 || x !== tuples[i - 1]);

       return new QuickDataSource([
-        await sink.WriteData(
+        await sink.writeData(
           `SdkInfo_${title}.cs`,
           `
 // <auto-generated>

@@ -479,12 +479,16 @@ async function compose(config: AutorestContext, input: DataSource, sink: DataSin
   const composer = new NewComposer(inputs[0]);
   return new QuickDataSource(
     [
-      await sink.WriteObject(
+      await sink.writeObject(
         "composed oai3 doc...",
         await composer.getOutput(),
         // eslint-disable-next-line prefer-spread
         [].concat.apply([], <any>inputs.map((each) => each.identity)),
         "merged-oai3",
+        {
+          mappings: await composer.getSourceMappings(),
+          mappingSources: [inputs[0]],
+        },
       ),
     ],
     input.pipeState,

@@ -481,7 +481,7 @@ async function filter(config: AutorestContext, input: DataSource, sink: DataSink
       config.GetEntry("output-artifact") === "profile-filter-log"
     ) {
       result.push(
-        await sink.WriteData(
+        await sink.writeData(
           "profile-filter-log.yaml",
           serialize({ "files-used": [...specsReferencedAfterFiltering], "files-not-used": [...specsNotUsed] }),
           [],

@@ -491,11 +491,15 @@ async function filter(config: AutorestContext, input: DataSource, sink: DataSink
       }

       result.push(
-        await sink.WriteObject(
+        await sink.writeObject(
           "oai3.profile-filtered.json",
           output,
           each.identity,
           "openapi3-document-profile-filtered",
+          {
+            mappings: await processor.getSourceMappings(),
+            mappingSources: [each],
+          },
         ),
       );
     } else {

@@ -47,8 +47,7 @@ export async function crawlReferences(
           : secondaryFileContent.openapi
             ? "openapi-document"
             : file.artifactType,
-        [],
-        [secondaryFile],
+        { mappings: [], mappingSources: [secondaryFile] },
       );

       // crawl that and add it to the secondary set.

@@ -59,7 +58,10 @@ export async function crawlReferences(
     await Promise.all(refProcessor.promises);
     const mapping = await refProcessor.getSourceMappings();
     // write the file to the data sink (this serializes the file, so it has to be done by this point.)
-    return sink.writeObject(file.description, output, file.identity, file.artifactType, mapping, [file]);
+    return sink.writeObject(file.description, output, file.identity, file.artifactType, {
+      mappings: mapping,
+      mappingSources: [file],
+    });
   }

   // this seems a bit convoluted, but in order to not break the order that

@@ -364,23 +364,26 @@ async function deduplicateSubsetSchemas(config: AutorestContext, input: DataSour
     */
     if (model.info?.["x-ms-metadata"]?.schemaReduced) {
       result.push(
-        await sink.WriteObject(
+        await sink.writeObject(
           "oai3.subset-schema-reduced.json",
           model,
           each.identity,
           "openapi-document-schema-reduced",
-          [],
         ),
       );
       continue;
     }
     const processor = new SubsetSchemaDeduplicator(each);
     result.push(
-      await sink.WriteObject(
+      await sink.writeObject(
         "oai3.subset-schema-reduced.json",
         await processor.getOutput(),
         each.identity,
         "openapi-document-schema-reduced",
+        {
+          mappings: await processor.getSourceMappings(),
+          mappingSources: [each],
+        },
       ),
     );
   }

@@ -40,14 +40,7 @@ export async function manipulateObject(
     const data = await src.ReadData();
     const newObject = transformer(null, data, []);
     if (newObject !== data) {
-      const resultHandle = await target.WriteData(
-        src.Description,
-        newObject,
-        src.identity,
-        src.artifactType,
-        undefined,
-        mappingInfo ? [src, mappingInfo.transformerSourceHandle] : [src],
-      );
+      const resultHandle = await target.writeData(src.description, newObject, src.identity, src.artifactType);
       return {
         anyHit: true,
         result: resultHandle,

@@ -133,14 +126,10 @@ export async function manipulateObject(
   }

   // write back
-  const resultHandle = await target.WriteData(
-    "manipulated",
-    StringifyAst(ast),
-    src.identity,
-    undefined,
-    mapping,
-    mappingInfo ? [src, mappingInfo.transformerSourceHandle] : [src],
-  );
+  const resultHandle = await target.writeData("manipulated", StringifyAst(ast), src.identity, undefined, {
+    mappings: mapping,
+    mappingSources: mappingInfo ? [src, mappingInfo.transformerSourceHandle] : [src],
+  });
   return {
     anyHit: true,
     result: resultHandle,

@@ -18,7 +18,7 @@ export function createGraphTransformerPlugin(): PipelinePlugin {
     for (const file of await input.Enum()) {
       const inputHandle = await input.Read(file);
       if (inputHandle) {
-        const documentId = `/${inputHandle.Description || inputHandle.key}`;
+        const documentId = `/${inputHandle.description || inputHandle.key}`;
         let contents: AnyObject | undefined = undefined;
         let modified = false;

@@ -37,7 +37,7 @@ export function createGraphTransformerPlugin(): PipelinePlugin {
         // if the file should be processed, run it thru
         for (const transform of directive.transform) {
           // get the whole document
-          contents = contents === undefined ? await inputHandle.ReadObjectFast() : contents;
+          contents = contents === undefined ? await inputHandle.readObjectFast() : contents;

           // find the target nodes in the document
           const targets = selectNodes(contents, where);

@@ -70,10 +70,10 @@ export function createGraphTransformerPlugin(): PipelinePlugin {

         if (modified) {
           result.push(
-            await sink.WriteObject(inputHandle.Description, contents, inputHandle.identity, inputHandle.artifactType),
+            await sink.writeObject(inputHandle.description, contents, inputHandle.identity, inputHandle.artifactType),
           );
         } else {
-          result.push(await sink.Forward(inputHandle.Description, inputHandle));
+          result.push(await sink.forward(inputHandle.description, inputHandle));
         }
       }
     }

@@ -97,10 +97,10 @@ export function createTextTransformerPlugin(): PipelinePlugin {
     }

     const result: Array<DataHandle> = [];
-    for (const file of await input.Enum()) {
-      const inputHandle = await input.Read(file);
+    for (const file of await input.enum()) {
+      const inputHandle = await input.read(file);
       if (inputHandle) {
-        const documentId = `/${inputHandle.Description || inputHandle.key}`;
+        const documentId = `/${inputHandle.description || inputHandle.key}`;
         let contents: string | undefined = undefined;
         let modified = false;

@@ -162,7 +162,7 @@ export function createTextTransformerPlugin(): PipelinePlugin {
             ),
           );
         } else {
-          result.push(await sink.Forward(inputHandle.description, inputHandle));
+          result.push(await sink.forward(inputHandle.description, inputHandle));
         }
       }
     }

@@ -194,6 +194,6 @@ export function createImmediateTransformerPlugin(): PipelinePlugin {
     const file = files[files.length - 1];
     const fileIn = await input.readStrict(file);
     const fileOut = await manipulator.process(fileIn, sink, isObject, fileIn.description);
-    return new QuickDataSource([await sink.Forward("swagger-document", fileOut)], input.pipeState);
+    return new QuickDataSource([await sink.forward("swagger-document", fileOut)], input.pipeState);
   };
 }

@@ -823,7 +823,12 @@ async function shakeTree(context: AutorestContext, input: DataSource, sink: Data
       },
     });

-    result.push(await sink.writeObject("oai3.shaken.json", output, each.identity, "openapi-document-shaken"));
+    result.push(
+      await sink.writeObject("oai3.shaken.json", output, each.identity, "openapi-document-shaken", {
+        mappings: await shaker.getSourceMappings(),
+        mappingSources: [each],
+      }),
+    );
   }
   return new QuickDataSource(result, input.pipeState);
 }

@@ -168,7 +168,10 @@ async function handleApiVersionParameter(config: AutorestContext, input: DataSou
     const processor = new ApiVersionParameterHandler(each);
     const output = await processor.getOutput();
     result.push(
-      await sink.WriteObject("oai3.noapiversion.json", output, each.identity, "openapi-document-noapiversion"),
+      await sink.writeObject("oai3.noapiversion.json", output, each.identity, "openapi-document-noapiversion", {
+        mappings: await processor.getSourceMappings(),
+        mappingSources: [each],
+      }),
     );
   }
   return new QuickDataSource(result, input.pipeState);

@@ -4,17 +4,17 @@ import { createPerFilePlugin, PipelinePlugin } from "../pipeline/common";
 /* @internal */
 export function createYamlToJsonPlugin(): PipelinePlugin {
   return createPerFilePlugin(async () => async (fileIn, sink) => {
-    let ast = await fileIn.ReadYamlAst();
+    let ast = await fileIn.readYamlAst();
     ast = ConvertYaml2Jsonx(ast);
-    return sink.WriteData(fileIn.Description, StringifyAst(ast), fileIn.identity);
+    return sink.writeData(fileIn.description, StringifyAst(ast), fileIn.identity);
   });
 }

 /* @internal */
 export function createJsonToYamlPlugin(): PipelinePlugin {
   return createPerFilePlugin(async () => async (fileIn, sink) => {
-    let ast = await fileIn.ReadYamlAst();
+    let ast = await fileIn.readYamlAst();
     ast = ConvertJsonx2Yaml(ast);
-    return sink.WriteData(fileIn.Description, StringifyAst(ast), fileIn.identity);
+    return sink.writeData(fileIn.description, StringifyAst(ast), fileIn.identity);
   });
 }

@@ -147,5 +147,8 @@ export async function mergeYamls(
     throw new OperationAbortedException();
   }

-  return sink.WriteObject("merged YAMLs", mergedGraph, newIdentity, undefined, mappings, yamlInputHandles);
+  return sink.writeObject("merged YAMLs", mergedGraph, newIdentity, undefined, {
+    mappings: mappings,
+    mappingSources: yamlInputHandles,
+  });
 }

@@ -24,9 +24,10 @@ export async function parseCodeBlocksFromMarkdown(
     const data = codeBlock.literal || "";
     const mappings = getSourceMapForCodeBlock(hConfigFile.key, codeBlock);

-    const hCodeBlock = await sink.WriteData(codeBlockKey, data, hConfigFile.identity, undefined, mappings, [
-      hConfigFile,
-    ]);
+    const hCodeBlock = await sink.writeData(codeBlockKey, data, hConfigFile.identity, undefined, {
+      mappings: mappings,
+      mappingSources: [hConfigFile],
+    });
     result.push({
       data: hCodeBlock,
       codeBlock,

@@ -1,5 +1,5 @@
 import { RawSourceMap, SourceMapGenerator } from "source-map";
-import { FastStringify } from "../yaml";
+import { fastStringify } from "../yaml";
 import { compileMapping, Mapping } from "../source-map/source-map";

 import { DataHandle } from "./data-handle";

@@ -31,12 +31,13 @@ export class DataSink {
     data: string,
     identity: string[],
     artifact?: string,
-    mappings: Mapping[] = [],
-    mappingSources: DataHandle[] = [],
+    mappings?: MappingParam,
   ): Promise<DataHandle> {
     return this.writeDataWithSourceMap(description, data, artifact, identity, async (readHandle) => {
       const sourceMapGenerator = new SourceMapGenerator({ file: readHandle.key });
-      await compileMapping(mappings, sourceMapGenerator, mappingSources.concat(readHandle));
+      if (mappings) {
+        await compileMapping(mappings.mappings, sourceMapGenerator, mappings.mappingSources.concat(readHandle));
+      }
       return sourceMapGenerator.toJSON();
     });
   }

@@ -46,57 +47,20 @@ export class DataSink {
     obj: T,
     identity: Array<string>,
     artifact?: string,
-    mappings: Array<Mapping> = [],
-    mappingSources: Array<DataHandle> = [],
+    mappings?: MappingParam,
   ): Promise<DataHandle> {
-    return this.writeData(description, FastStringify(obj), identity, artifact, mappings, mappingSources);
-  }
-
-  /**
-   * @deprecated use @see writeDataWithSourceMap
-   */
-  public async WriteDataWithSourceMap(
-    description: string,
-    data: string,
-    artifact: string | undefined,
-    identity: Array<string>,
-    sourceMapFactory: (readHandle: DataHandle) => Promise<RawSourceMap>,
-  ): Promise<DataHandle> {
-    return this.writeDataWithSourceMap(description, data, artifact, identity, sourceMapFactory);
-  }
-
-  /**
-   * @deprecated use @see writeData
-   */
-  public async WriteData(
-    description: string,
-    data: string,
-    identity: Array<string>,
-    artifact?: string,
-    mappings: Array<Mapping> = [],
-    mappingSources: Array<DataHandle> = [],
-  ): Promise<DataHandle> {
-    return this.writeData(description, data, identity, artifact, mappings, mappingSources);
-  }
-
-  /**
-   * @deprecated use @see writeObject
-   */
-  public WriteObject<T>(
-    description: string,
-    obj: T,
-    identity: Array<string>,
-    artifact?: string,
-    mappings: Array<Mapping> = [],
-    mappingSources: Array<DataHandle> = [],
-  ): Promise<DataHandle> {
-    return this.writeObject(description, obj, identity, artifact, mappings, mappingSources);
-  }
-
-  /**
-   * @deprecated use @see forward
-   */
-  public Forward(description: string, input: DataHandle): Promise<DataHandle> {
-    return this.forward(description, input);
+    return this.writeData(description, fastStringify(obj), identity, artifact, mappings);
   }
 }

+export interface MappingParam {
+  /**
+   * List of mappings from original to generated
+   */
+  mappings: Mapping[];
+
+  /**
+   * Data handle of the source mapping.
+   */
+  mappingSources: DataHandle[];
+}

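For callers migrating to the new signature, the hunk above collapses the two optional positional arguments (mappings, mappingSources) into a single optional MappingParam object. A minimal before/after sketch in TypeScript; the sink, text, identity, myMappings, and sourceHandle names are illustrative, not part of this diff:

// Before: two trailing positional arguments that were easy to swap by accident.
// await sink.WriteData("out.yaml", text, identity, "my-artifact", myMappings, [sourceHandle]);

// After: one object keeps the Mapping[] paired with the DataHandles it refers to.
const handle = await sink.writeData("out.yaml", text, identity, "my-artifact", {
  mappings: myMappings, // Mapping[]: original -> generated positions
  mappingSources: [sourceHandle], // DataHandle[]: inputs the mappings point back into
});
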
@@ -1,7 +1,8 @@
 import { Mapping } from "source-map";
-import { JsonPointer, parseJsonPointer } from "./json-pointer/json-pointer";
-import { CreateAssignmentMapping } from "./source-map/source-map";
+import { JsonPointer } from "./json-pointer/json-pointer";
+import { createAssignmentMapping } from "./source-map/source-map";
 import { Exception } from "@azure-tools/tasks";
+import { parseJsonPointer } from "@azure-tools/json";

 export function createGraphProxy<T extends object>(
   originalFileName: string,

@@ -17,10 +18,10 @@ export function createGraphProxy<T extends object>(
     subject: string | undefined,
     recurse: boolean,
   ) => {
-    CreateAssignmentMapping(
+    createAssignmentMapping(
       value,
       filename,
-      parseJsonPointer(pointer),
+      parseJsonPointer(pointer).filter((each) => each !== ""),
       [...parseJsonPointer(targetPointer), key].filter((each) => each !== ""),
       subject || "",
       recurse,

@@ -28,22 +29,24 @@ export function createGraphProxy<T extends object>(
     );
   };

-  const push = (value: any) => {
+  const push = (value: { pointer?: string; value: any; recurse?: boolean; filename?: string; subject?: string }) => {
     instance.push(value.value);
     const filename = value.filename || originalFileName;
     if (!filename) {
       throw new Error("Assignment: filename must be specified when there is no default.");
     }
-    const pp = parseJsonPointer(value.pointer);
+    const pp = value.pointer ? parseJsonPointer(value.pointer) : [];
     const q = <any>parseInt(pp[pp.length - 1], 10);
     if (q >= 0) {
       pp[pp.length - 1] = q;
     }
-    CreateAssignmentMapping(
+    createAssignmentMapping(
       value.value,
       filename,
       pp,
-      [...parseJsonPointer(targetPointer), instance.length - 1].filter((each) => each !== ""),
+      [...parseJsonPointer(targetPointer).filter((each) => each !== ""), instance.length - 1].filter(
+        (each) => each !== "",
+      ),
       value.subject || "",
       value.recurse,
       mappings,

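With push now typed, call sites pass a value object plus the JSON-pointer metadata that feeds createAssignmentMapping; the MultiAPIMerger hunk earlier in this diff shows the live shape. A short TypeScript sketch with illustrative field values:

this.generated.servers.push({
  value: { url: hosts[0], description: "Default server" },
  pointer: "/servers/0", // source position used to build the assignment mapping
  recurse: false, // do not map descendants of the assigned value
});
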
@@ -2,7 +2,7 @@ import { clone, values } from "@azure-tools/linq";
 import { Mapping } from "source-map";
 import { ProxyObject } from "./graph-builder";
 import { createGraphProxy, Node, ProxyNode, visit } from "./main";
-import { parseJsonPointer, serializeJsonPointer } from "./json-pointer";
+import { parseJsonPointer, serializeJsonPointer } from "@azure-tools/json";

 export interface AnyObject {
   [key: string]: any;

@@ -91,14 +91,18 @@ export async function compileMapping(
   };

   for (const mapping of mappings) {
-    const compiledGenerated = await compilePos(mapping.generated, generatedFile);
-    const compiledOriginal = await compilePos(mapping.original, mapping.source);
-    target.addMapping({
-      generated: compiledGenerated,
-      original: compiledOriginal,
-      name: encodeEnhancedPositionInName(mapping.name, compiledOriginal),
-      source: mapping.source,
-    });
+    try {
+      const compiledGenerated = await compilePos(mapping.generated, generatedFile);
+      const compiledOriginal = await compilePos(mapping.original, mapping.source);
+      target.addMapping({
+        generated: compiledGenerated,
+        original: compiledOriginal,
+        name: encodeEnhancedPositionInName(mapping.name, compiledOriginal),
+        source: mapping.source,
+      });
+    } catch {
+      // Failed to acquire a mapping for the orignal or generated position(probably means the entry got added or removed) don't do anything.
+    }
   }
 }

@@ -107,14 +111,14 @@ export async function compileMapping(
  * @description This does make an implicit assumption that the decendents of the 'generated' node are 1:1 with the descendents in the 'source' node.
  * In the event that is not true, elements in the target's source map will not be pointing to the correct elements in the source node.
  */
-export function CreateAssignmentMapping(
+export function createAssignmentMapping(
   assignedObject: any,
   sourceKey: string,
   sourcePath: JsonPath,
   targetPath: JsonPath,
   subject: string,
   recurse = true,
-  result = new Array<Mapping>(),
+  result: Mapping[] = [],
 ): Array<Mapping> {
   for (const descendant of Descendants(ToAst(assignedObject))) {
     const path = descendant.path;

@@ -217,13 +217,13 @@ export function CloneAst<T extends YAMLNode>(ast: T): T {
   return ParseToAst(StringifyAst(ast)) as T;
 }
 export function StringifyAst(ast: YAMLNode): string {
-  return FastStringify(ParseNode<any>(ast));
+  return fastStringify(ParseNode<any>(ast));
 }
 export function Clone<T>(object: T): T {
   if (object === undefined) {
     return object;
   }
-  return Parse<T>(FastStringify(object));
+  return Parse<T>(fastStringify(object));
 }

 /**

@@ -254,7 +254,7 @@ export function Normalize<T>(object: T): T {
   return clone;
 }
 export function ToAst<T>(object: T): YAMLNode {
-  return ParseToAst(FastStringify(object));
+  return ParseToAst(fastStringify(object));
 }

 export function Parse<T>(

@@ -272,7 +272,7 @@ export function Stringify<T>(object: T): string {
   return "---\n" + dump(object, { skipInvalid: true });
 }

-export function FastStringify<T>(obj: T): string {
+export function fastStringify<T>(obj: T): string {
   // has duplicate objects?
   const seen = new WeakSet();
   const losslessJsonSerializable = (o: any): boolean => {

@@ -56,6 +56,7 @@
   "dependencies": {
     "@azure-tools/datastore": "~4.3.1",
     "@azure-tools/openapi": "~3.1.3",
+    "@azure-tools/json": "~1.0.0",
     "source-map": "0.7.3"
   }
 }

@@ -6,7 +6,6 @@ import {
   OpenAPI2ResponseHeader,
   OpenAPI2Operation,
   OpenAPI2OperationResponse,
-  OpenAPI2Parameter,
   OpenAPI2BodyParameter,
   OpenApi2FormDataParameter,
 } from "./oai2";

@@ -1,16 +1,18 @@
-import { DataHandle, get } from "@azure-tools/datastore";
+import { DataHandle, Mapping } from "@azure-tools/datastore";
+import { getFromJsonPointer } from "@azure-tools/json";
 import { Oai2ToOai3 } from "../converter";
 import { OpenAPI2Document } from "../oai2";
 import { loadInputFiles } from "./utils";

 export interface OaiToOai3FileInput {
   name: string;
-  schema: OpenAPI2Document; // OAI2 type?
+  schema: OpenAPI2Document;
 }

 export interface OaiToOai3FileOutput {
   name: string;
-  result: any; // OAI2 type?
+  result: any; // OAI3 type?
+  mappings: Mapping[];
 }

 export const convertOai2ToOai3Files = async (inputFiles: DataHandle[]): Promise<OaiToOai3FileOutput[]> => {

@@ -35,7 +37,7 @@ export const convertOai2ToOai3 = async (inputs: Map<string, OaiToOai3FileInput>)
       throw new Error(`Ref file ${targetfile} doesn't exists.`);
     }

-    return get(file.schema, refPath);
+    return getFromJsonPointer(file.schema, refPath);
   };

   const computeFile = async (input: OaiToOai3FileInput) => {

@@ -45,12 +47,13 @@ export const convertOai2ToOai3 = async (inputs: Map<string, OaiToOai3FileInput>)
     }
     resolvingFiles.add(input.name);

-    const result = await convertOai2ToOai3Schema(input, resolveReference);
+    const { result, mappings } = await convertOai2ToOai3Schema(input, resolveReference);
     completedFiles.set(input.name, {
       result,
       name: input.name,
+      mappings,
     });
-    return result;
+    return { result, mappings };
   };

   for (const input of inputs.values()) {

@@ -68,8 +71,13 @@ export type ResolveReferenceFn = (targetfile: string, reference: string) => Prom
 export const convertOai2ToOai3Schema = async (
   { name, schema }: OaiToOai3FileInput,
   resolveReference: ResolveReferenceFn,
-): Promise<any> => {
+): Promise<Oai2ToOai3Result> => {
   const converter = new Oai2ToOai3(name, schema, resolveReference);
   await converter.convert();
-  return converter.generated;
+  return { result: converter.generated, mappings: converter.mappings };
 };
+
+export interface Oai2ToOai3Result {
+  result: any;
+  mappings: Mapping[];
+}

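With mappings now returned alongside each converted document, a consumer of convertOai2ToOai3Files can forward them to the data sink so the OpenAPI 3 output keeps a source map into the OpenAPI 2 input. A hedged consumption sketch; inputHandles and sink are illustrative names, not from this diff:

const outputs = await convertOai2ToOai3Files(inputHandles);
for (const { name, result, mappings } of outputs) {
  // result is the converted OpenAPI 3 document; pairing its mappings with the
  // original DataHandle preserves the source map across this pipeline stage.
  const source = inputHandles.find((h) => h.originalFullPath === name);
  await sink.writeObject(name, result, source ? source.identity : [name], "openapi-document", {
    mappings,
    mappingSources: source ? [source] : [],
  });
}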