Mirror of https://github.com/Azure/autorest.git
Fix: `$ref` with urlencoded (#4556)
This commit is contained in:
Parent
b6b389a83c
Commit
622e7af7ee
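For context, a minimal sketch (not from the repository) of the kind of document this fix targets: a spec whose definition name contains a character that appears percent-encoded inside `$ref`, such as a definition literally named `$Foo` referenced as `%24Foo` (the same case exercised by the test added below):

// Hypothetical spec fragment for illustration only.
const spec = {
  definitions: {
    $Foo: { type: "string" },
  },
  properties: {
    // "%24" is the percent-encoding of "$"; before this fix the pointer was not
    // decoded, so it could not be matched against the "$Foo" definition.
    foo: { $ref: "#/definitions/%24Foo" },
  },
};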
@@ -0,0 +1,10 @@
+{
+  "changes": [
+    {
+      "packageName": "@autorest/core",
+      "comment": "Fix: `$ref` contains percent-encoding.",
+      "type": "patch"
+    }
+  ],
+  "packageName": "@autorest/core"
+}

@@ -0,0 +1,10 @@
+{
+  "changes": [
+    {
+      "packageName": "@azure-tools/datastore",
+      "comment": "",
+      "type": "none"
+    }
+  ],
+  "packageName": "@azure-tools/datastore"
+}

@@ -0,0 +1,10 @@
+{
+  "changes": [
+    {
+      "packageName": "@azure-tools/jsonschema",
+      "comment": "Fix: `$ref` contains percent-encoding.",
+      "type": "patch"
+    }
+  ],
+  "packageName": "@azure-tools/jsonschema"
+}

@@ -1,5 +1,5 @@
 import { IAutorestLogger, PluginUserError } from "@autorest/common";
-import { DataSource, IdentityPathMappings, QuickDataSource } from "@azure-tools/datastore";
+import { DataHandle, DataSource, IdentityPathMappings, QuickDataSource } from "@azure-tools/datastore";
 import { InvalidJsonPointer } from "@azure-tools/json";
 import { parseJsonRef } from "@azure-tools/jsonschema";
 import { createOpenAPIWorkspace, OpenAPIWorkspace } from "@azure-tools/openapi";

@@ -13,11 +13,17 @@ export function createFullRefResolverPlugin(): PipelinePlugin {
   return async (context, input, sink) => {
     const files = await input.enum();
     const dataHandles = await Promise.all(files.map((x) => input.readStrict(x)));
-    const specs = Object.fromEntries(
+    const specs: Record<string, { spec: any; dataHandle: DataHandle }> = Object.fromEntries(
       await Promise.all(
         dataHandles.map(async (dataHandle) => {
           const uri = resolveUri(dataHandle.originalDirectory, dataHandle.identity[0]);
-          return [uri, await dataHandle.readObject()];
+          return [
+            uri,
+            {
+              dataHandle,
+              spec: await dataHandle.readObject(),
+            },
+          ];
         }),
       ),
     );
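For orientation (not part of the diff): the map built above is keyed by the resolved document URI and now carries the originating data handle alongside the parsed spec, so later stages can attach diagnostics to the in-pipeline document key. A reduced sketch with simplified types; `DataHandleLike` is a stand-in for the real `DataHandle`:

// Simplified shape of the new specs map (illustrative types only).
interface DataHandleLike {
  key: string;                     // in-pipeline identifier, later used as the diagnostic source document
  readObject(): Promise<unknown>;  // parsed spec content
}

type SpecsMap = Record<string, { spec: unknown; dataHandle: DataHandleLike }>;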

@@ -34,7 +40,7 @@ export function createFullRefResolverPlugin(): PipelinePlugin {
     const results = await Promise.all(
       dataHandles.map(async (dataHandle) => {
         const uri = resolveUri(dataHandle.originalDirectory, dataHandle.identity[0]);
-        return sink.writeObject(dataHandle.description, specs[uri], dataHandle.identity, dataHandle.artifactType, {
+        return sink.writeObject(dataHandle.description, specs[uri].spec, dataHandle.identity, dataHandle.artifactType, {
           pathMappings: new IdentityPathMappings(dataHandle.key),
         });
       }),

@@ -47,13 +53,15 @@
 async function resolveRefs(
   logger: IAutorestLogger,
   dataSource: DataSource,
-  specs: Record<string, any>,
+  specs: Record<string, { dataHandle: DataHandle; spec: any }>,
   options: RefProcessorOptions,
 ) {
-  const workspace = createOpenAPIWorkspace({ specs });
+  const workspace = createOpenAPIWorkspace({
+    specs: Object.fromEntries(Object.entries(specs).map(([k, { spec }]) => [k, spec])),
+  });
   let success = true;
-  for (const [uri, spec] of Object.entries(specs)) {
-    if (!(await crawlRefs(logger, dataSource, uri, spec, workspace, options))) {
+  for (const [uri, { dataHandle, spec }] of Object.entries(specs)) {
+    if (!(await crawlRefs(logger, dataSource, dataHandle, uri, spec, workspace, options))) {
       success = false;
     }
   }

@@ -63,6 +71,7 @@ async function resolveRefs(
 async function crawlRefs(
   logger: IAutorestLogger,
   dataSource: DataSource,
+  dataHandle: DataHandle,
   originalFileLocation: string,
   spec: any,
   workspace: OpenAPIWorkspace<any>,

@@ -94,8 +103,8 @@ async function crawlRefs(
       success = false;
       logger.trackError({
         code: "InvalidRef",
-        message: `Ref '${value}' is not referencing a valid location.`,
-        source: [{ document: originalFileLocation, position: { path: pointer } }],
+        message: `Ref '${value}' is not referencing a valid location. ${pointer}`,
+        source: [{ document: dataHandle.key, position: { path: pointer } }],
       });
     }
     obj[key] = newReference;

@@ -1,4 +1,4 @@
-import oai3, { ParameterLocation, Refable } from "@azure-tools/openapi";
+import oai3, { OpenAPI3Document, ParameterLocation, Refable } from "@azure-tools/openapi";
 import { ResolveReferenceFn, SemanticError, SemanticErrorCodes } from "../types";
 import { createReferenceResolver } from "../utils";
 

@@ -12,7 +12,7 @@ const operationKeys = new Set(["get", "post", "put", "delete", "options", "head"
  * @param resolveReference
  * @returns
  */
-export function validatePaths(spec: oai3.Model, resolve?: ResolveReferenceFn): SemanticError[] {
+export function validatePaths(spec: OpenAPI3Document, resolve?: ResolveReferenceFn): SemanticError[] {
   const resolveReference = createReferenceResolver(spec, resolve);
   const paths = spec.paths;
   const errors: SemanticError[] = [];

@@ -14,6 +14,7 @@ import { PathMappedPosition } from "./path-source-map";
 export class BlameTree {
   public static async create(dataStore: DataStore, position: MappedPosition | PathMappedPosition): Promise<BlameTree> {
     const data = dataStore.readStrictSync(position.source);
+
     const blames = await data.blame(position as any);
     const children = [];
     for (const pos of blames) {

@@ -13,6 +13,10 @@ describe("JsonSchema Refs", () => {
   it("parse file only", () => {
     expect(parseJsonRef("bar.json")).toEqual({ file: "bar.json" });
   });
+
+  it("parse with path containing url encoded", () => {
+    expect(parseJsonRef("#/definitions/%24Foo")).toEqual({ path: "/definitions/$Foo" });
+  });
 });
 
 describe("stringifyJsonRef", () => {
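A further case along the same lines (hypothetical, not part of the commit): a ref that combines a file part with a percent-encoded pointer, which the patched parser should split on "#" and then decode:

it("parse with file and url encoded path", () => {
  expect(parseJsonRef("bar.json#/definitions/%24Foo")).toEqual({ file: "bar.json", path: "/definitions/$Foo" });
});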

@@ -18,7 +18,10 @@ export type JsonRef =
  */
 export function parseJsonRef(ref: string): JsonRef {
   const [file, path] = ref.split("#");
-  return { file: file === "" ? undefined : file, path };
+  return {
+    file: file === "" ? undefined : file,
+    path: path === undefined ? (undefined as any) : decodeURIComponent(path),
+  };
 }
 
 /**
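For reference, `decodeURIComponent` only reverses percent escapes; JSON-pointer escapes such as `~0` and `~1` pass through unchanged. Two plain examples of the standard ECMAScript behavior relied on above (not code from the commit):

decodeURIComponent("/definitions/%24Foo");         // => "/definitions/$Foo"
decodeURIComponent("/paths/~1pets~1%7BpetId%7D");  // => "/paths/~1pets~1{petId}"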