Mirror of https://github.com/Azure/autorest.git
Refactoring configuration loading and creation of new @autorest/configuration package (#3848)
This commit is contained in:
Parent
090986bd10
Commit
2161d0cb67
@@ -0,0 +1,11 @@
{
  "changes": [
    {
      "packageName": "@autorest/configuration",
      "comment": "Initial release, include models only",
      "type": "patch"
    }
  ],
  "packageName": "@autorest/configuration",
  "email": "tiguerin@microsoft.com"
}
@@ -0,0 +1,11 @@
{
  "changes": [
    {
      "packageName": "@autorest/core",
      "comment": "Rethink config",
      "type": "patch"
    }
  ],
  "packageName": "@autorest/core",
  "email": "tiguerin@microsoft.com"
}
@@ -0,0 +1,11 @@
{
  "changes": [
    {
      "packageName": "autorest",
      "comment": "",
      "type": "none"
    }
  ],
  "packageName": "autorest",
  "email": "tiguerin@microsoft.com"
}
@@ -10,6 +10,7 @@ dependencies:
  '@rush-temp/codegen': file:projects/codegen.tgz_prettier@2.2.1+ts-node@9.1.1
  '@rush-temp/codemodel': file:projects/codemodel.tgz_prettier@2.2.1
  '@rush-temp/compare': file:projects/compare.tgz_prettier@2.2.1
  '@rush-temp/configuration': file:projects/configuration.tgz_prettier@2.2.1+ts-node@9.1.1
  '@rush-temp/core': file:projects/core.tgz_ts-node@9.1.1
  '@rush-temp/datastore': file:projects/datastore.tgz_prettier@2.2.1+ts-node@9.1.1
  '@rush-temp/deduplication': file:projects/deduplication.tgz_prettier@2.2.1+ts-node@9.1.1
@@ -9934,6 +9935,28 @@ packages:
      integrity: sha512-Twa6pn0zBRB7MWM+rMR71EEQMYc7tM1iVAfStb1i04lQaN1DZZ/Eh4mb1jdPJE+/kR24PoKLxvMLjKm3rnvOXA==
      tarball: file:projects/compare.tgz
    version: 0.0.0
  file:projects/configuration.tgz_prettier@2.2.1+ts-node@9.1.1:
    dependencies:
      '@types/jest': 26.0.20
      '@types/node': 14.14.22
      '@typescript-eslint/eslint-plugin': 4.14.2_e5f964fa93e839b7a7927397f6cb9cb1
      '@typescript-eslint/parser': 4.14.2_eslint@7.19.0+typescript@4.1.3
      eslint: 7.19.0
      eslint-plugin-prettier: 3.2.0_eslint@7.19.0+prettier@2.2.1
      eslint-plugin-unicorn: 27.0.0_eslint@7.19.0
      jest: 26.6.3_ts-node@9.1.1
      rimraf: 3.0.2
      typescript: 4.1.3
    dev: false
    id: file:projects/configuration.tgz
    name: '@rush-temp/configuration'
    peerDependencies:
      prettier: '*'
      ts-node: '*'
    resolution:
      integrity: sha512-EVrxkL1FDh6fBjFjOyXeDB5zySa2JZVha4LWxkSDNs7OsT/0+8FgbxyN1JRQ0y/jTf2Im0iX0lp//ZCDYXM6DA==
      tarball: file:projects/configuration.tgz
    version: 0.0.0
  file:projects/core.tgz_ts-node@9.1.1:
    dependencies:
      '@azure-tools/async-io': 3.0.253
@@ -9983,7 +10006,7 @@ packages:
    peerDependencies:
      ts-node: '*'
    resolution:
      integrity: sha512-rClVUIcTSsreGuuNM2/Pzh9IIyvgfu9ODcO33u6vAnWTMxMSXfmDi3CVU+42AB6gfjIeWx6ufp/7VbfZ4UadRA==
      integrity: sha512-0p/B+FO7SiasvCnxLnWO1/PcCTNdl0R+Rtj7qoIPOqqJlGXvCE35gmKDaPTAO7IX4IGoIt4RlcAy+eXzZ+RvVw==
      tarball: file:projects/core.tgz
    version: 0.0.0
  file:projects/datastore.tgz_prettier@2.2.1+ts-node@9.1.1:
@@ -10204,6 +10227,7 @@ packages:
  file:projects/test-public-packages.tgz_prettier@2.2.1:
    dependencies:
      '@types/jest': 26.0.20
      '@types/node': 14.14.22
      '@types/source-map-support': 0.5.3
      eslint: 7.19.0
      eslint-plugin-prettier: 3.2.0_eslint@7.19.0+prettier@2.2.1
@@ -10219,7 +10243,7 @@ packages:
    peerDependencies:
      prettier: '*'
    resolution:
      integrity: sha512-jRwFyXU3CUmLSzRcEawNKokHJVQQiEk8FDC1iheNNd4MDG3KQZdt6RQ00w9w6PWOahHXnNsXcUjI58pkmCRMLQ==
      integrity: sha512-eYuOor8YA3sjAjGczARPVi+LUHqgCKxNRvxPaaEsuOUasEYQ8hn71h3JOYZQHpj0XHwAdFgfr9+zfyV/e/4Yrg==
      tarball: file:projects/test-public-packages.tgz
    version: 0.0.0
  file:projects/test-utils.tgz_prettier@2.2.1+ts-node@9.1.1:
@@ -10255,6 +10279,7 @@ specifiers:
  '@rush-temp/codegen': file:./projects/codegen.tgz
  '@rush-temp/codemodel': file:./projects/codemodel.tgz
  '@rush-temp/compare': file:./projects/compare.tgz
  '@rush-temp/configuration': file:./projects/configuration.tgz
  '@rush-temp/core': file:./projects/core.tgz
  '@rush-temp/datastore': file:./projects/datastore.tgz
  '@rush-temp/deduplication': file:./projects/deduplication.tgz
@@ -1,284 +1,7 @@
// Dear @future_garrett:
// Currently, the construction of this file is hand-constructed, and should really be automated in the future.
// I would have done it today, but ... frankly, I don't see what the payoff is for me, when it's not my problem.
//
// Sincerely yours,
// @fearthecowboy.

declare module "autorest-core" {
  import * as events from "events";

  export interface Position {
    line: number;
    column: number;
  }
  export interface PositionEnhancements {
    path?: JsonPath;
    length?: number;
    valueOffset?: number;
    valueLength?: number;
  }

  export type JsonPathComponent = string | number;
  export type JsonPath = Array<JsonPathComponent>;

  /**
   * The Channel that a message is registered with.
   */
  export enum Channel {
    /** Information is considered the mildest of responses; not necessarily actionable. */
    Information,

    /** Warnings are considered important for best practices, but not catastrophic in nature. */
    Warning,

    /** Errors are considered blocking issues that block a successful operation. */
    Error,

    /** Debug messages are designed for the developer to communicate internal autorest implementation details. */
    Debug,

    /** Verbose messages give the user additional clarity on the process. */
    Verbose,

    /** Catastrophic failure, likely abending the process. */
    Fatal,

    /** Hint messages offer guidance or support without forcing action. */
    Hint,

    /** File represents a file output from an extension. Details are an Artifact and are required. */
    File,

    /** content represents an update/creation of a configuration file. The final uri will be in the same folder as the primary config file. */
    Configuration,
  }

  export type EnhancedPosition = Position & PositionEnhancements;

  export interface SourceLocation {
    document: string;
    Position: EnhancedPosition;
  }
  export interface Range {
    document: string;
    start: Position;
    end: Position;
  }
  export interface Message {
    Channel: Channel;
    Key?: Iterable<string>;
    Details?: any;
    Text: string;
    Source?: Array<SourceLocation>;
    Range?: Iterable<Range>;
    Plugin?: string;
    FormattedMessage?: string;
  }

  export type SmartPosition = Position | { path: JsonPath };

  export interface Mapping {
    generated: SmartPosition;
    original: SmartPosition;
    source: string;
    name?: string;
  }

  interface RawSourceMap {
    version: number;
    sources: string[];
    names: string[];
    sourceRoot?: string;
    sourcesContent?: string[];
    mappings: string;
    file: string;
  }

  export interface Artifact {
    uri: string;
    type: string;
    content: string;
  }

  export interface ArtifactMessage extends Message {
    Details: Artifact & {
      sourceMap?: Array<Mapping> | RawSourceMap;
    };
  }


  /**
   * The results from calling the 'generate' method via the {@link AutoRestLanguageService/generate}
   *
   */
  export interface GenerationResults {
    /** the array of messages produced from the run. */

    messages: Array<string>;
    /** the collection of outputted files.
     *
     * Member keys are the file names
     * Member values are the file contents
     *
     * To Access the files:
     *   for( const filename in generated.files ) {
     *     const content = generated.files[filename];
     *     /// ...
     *   }
     */
    files: Map<string, string>;
  }

  export interface IFileSystem {
    EnumerateFileUris(folderUri: string): Promise<Array<string>>;
    ReadFile(uri: string): Promise<string>;
  }

  class EventEmitter extends events.EventEmitter {
  }

  export interface IEvent<TSender extends events.EventEmitter, TArgs> {
    Subscribe(fn: (sender: TSender, args: TArgs) => void): () => void;
    Unsubscribe(fn: (sender: TSender, args: TArgs) => void): void;
    Dispatch(args: TArgs): void;
  }

  export interface MessageEmitter extends EventEmitter {
    /**
     * Event: Signals when a File is generated
     */
    GeneratedFile: IEvent<MessageEmitter, Artifact>;
    /**
     * Event: Signals when a Folder is supposed to be cleared
     */
    ClearFolder: IEvent<MessageEmitter, string>;
    /**
     * Event: Signals when a message is generated
     */
    Message: IEvent<MessageEmitter, Message>;
  }

  export interface Directive {
    from?: Array<string> | string;
    where?: Array<string> | string;
    reason?: string;
    suppress?: Array<string> | string;
    set?: Array<string> | string;
    transform?: Array<string> | string;
    test?: Array<string> | string;
  }

  export interface DirectiveView {
    readonly from: Iterable<string>;
    readonly where: Iterable<string>;
    readonly reason: string | null;
    readonly suppress: Iterable<string>;
    readonly transform: Iterable<string>;
    readonly test: Iterable<string>;
  }

  export interface AutoRestConfigurationImpl {
    __info?: string | null;
    'allow-no-input'?: boolean;
    'input-file'?: Array<string> | string;
    'base-folder'?: string;
    'directive'?: Array<Directive> | Directive;
    'declare-directive'?: {
      [name: string]: string;
    };
    'output-artifact'?: Array<string> | string;
    'message-format'?: 'json' | 'yaml' | 'regular';
    'use-extension'?: {
      [extensionName: string]: string;
    };
    'require'?: Array<string> | string;
    'try-require'?: Array<string> | string;
    'help'?: any;
    'vscode'?: any;
    'override-info'?: any;
    'title'?: any;
    'description'?: any;
    'debug'?: boolean;
    'verbose'?: boolean;
    'output-file'?: string;
    'output-folder'?: string;
    'client-side-validation'?: boolean;
    'fluent'?: boolean;
    'azure-arm'?: boolean;
    'namespace'?: string;
    'license-header'?: string;
    'add-credentials'?: boolean;
    'package-name'?: string;
    'package-version'?: string;
    'sync-methods'?: 'all' | 'essential' | 'none';
    'payload-flattening-threshold'?: number;
    'openapi-type'?: string;
  }

  export interface ConfigurationView {
    configurationFiles: {
      [key: string]: any;
    };
    fileSystem: IFileSystem;
    messageEmitter: MessageEmitter;
    configFileFolderUri: string;
    [name: string]: any;
    readonly Keys: Array<string>;
    readonly UseExtensions: Array<{
      name: string;
      source: string;
      fullyQualified: string;
    }>;
    IncludedConfigurationFiles(fileSystem: IFileSystem, ignoreFiles: Set<string>): Promise<Array<string>>;
    readonly Directives: Array<DirectiveView>;
    readonly InputFileUris: Array<string>;
    readonly OutputFolderUri: string;
    IsOutputArtifactRequested(artifact: string): boolean;
    GetEntry(key: keyof AutoRestConfigurationImpl): any;
    readonly Raw: AutoRestConfigurationImpl;
    readonly DebugMode: boolean;
    readonly VerboseMode: boolean;
    readonly HelpRequested: boolean;
    GetNestedConfiguration(pluginName: string): Iterable<ConfigurationView>;
    GetNestedConfigurationImmediate(...scope: Array<any>): ConfigurationView;
    Message(m: Message): void;
  }

  /**
   * An instance of the AutoRest generator.
   *
   * Note: to create an instance of autore
   */
  export interface AutoRest extends EventEmitter {
    configFileOrFolderUri?: string | undefined;
    /**
     * Event: Signals when a Process() finishes.
     */
    Finished: IEvent<AutoRest, boolean | Error>;
    /**
     * Event: Signals when a File is generated
     */
    GeneratedFile: IEvent<AutoRest, Artifact>;
    /**
     * Event: Signals when a Folder is supposed to be cleared
     */
    ClearFolder: IEvent<AutoRest, string>;
    /**
     * Event: Signals when a message is generated
     */
    Message: IEvent<AutoRest, Message>;
    readonly view: Promise<ConfigurationView>;
    RegenerateView(includeDefault?: boolean): Promise<ConfigurationView>;
    Invalidate(): void;
    AddConfiguration(configuration: any): void;
    ResetConfiguration(): Promise<void>;
    /**
     * Called to start processing of the files.
     */
    Process(): {
      finish: Promise<boolean | Error>;
      cancel(): void;
    };
  }
  export type Message = any;
  export type Artifact = any;
  export type GenerationResults = any;
  export type IFileSystem = any;
  export type AutoRest = any;
}
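Illustrative only, not part of the diff: a minimal sketch of how a consumer could drive the AutoRest interface declared above (events plus Process), assuming an already-constructed instance; how that instance is obtained is out of scope here.

import { AutoRest, Artifact, Channel, Message } from "autorest-core";

async function run(api: AutoRest): Promise<boolean | Error> {
  // Subscribe returns an unsubscribe callback per the IEvent declaration.
  const stopFiles = api.GeneratedFile.Subscribe((_, artifact: Artifact) => {
    console.log(`generated ${artifact.uri} (${artifact.type})`);
  });
  const stopMessages = api.Message.Subscribe((_, m: Message) => {
    if (m.Channel === Channel.Error || m.Channel === Channel.Warning) {
      console.log(m.Text);
    }
  });

  try {
    // Process() hands back both the completion promise and a cancel hook.
    const { finish } = api.Process();
    return await finish;
  } finally {
    stopFiles();
    stopMessages();
  }
}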
@@ -59,7 +59,7 @@ export const configurationSpecifiedVersion = async (args: AutorestArgs, selected
  api.AddConfiguration(switches);

  // resolve the configuration and return the version if there is one.
  return (await api.view).rawConfig.version;
  return (await api.view).rawConfig.version;
}
go();
`,
@@ -39,6 +39,7 @@
  },
  "typings": "./dist/exports.d.ts",
  "devDependencies": {
    "@autorest/configuration": "~1.0.0",
    "@autorest/schemas": "~1.0.1",
    "@azure-tools/async-io": "~3.0.0",
    "@azure-tools/codegen": "~2.5.293",
@@ -55,12 +55,13 @@ import { join, resolve as currentDirectory } from "path";
import { Help } from "./help";
import { CreateConfiguration, isLegacy } from "./legacyCli";
import { Artifact } from "./lib/artifact";
import { AutoRest, ConfigurationView, IsOpenApiDocument, Shutdown } from "./lib/autorest-core";
import { AutoRestRawConfiguration, mergeConfigurations } from "./lib/configuration";
import { Exception, OperationCanceledException } from "./lib/exception";
import { AutoRest, IsOpenApiDocument, Shutdown } from "./lib/autorest-core";
import { mergeConfigurations } from "./lib/configuration";
import { Exception } from "./lib/exception";
import { Channel, Message } from "./lib/message";
import { ShallowCopy } from "./lib/source-map/merging";
import { homedir } from "os";
import { AutorestRawConfiguration } from "@autorest/configuration";

let verbose = false;
let debug = false;
@@ -76,7 +77,7 @@ async function legacyMain(autorestArgs: Array<string>): Promise<number> {
  // generate virtual config file
  const currentDirUri = CreateFolderUri(currentDirectory());
  const dataStore = new DataStore();
  let config: AutoRestRawConfiguration = {};
  let config: AutorestRawConfiguration = {};
  try {
    config = await CreateConfiguration(
      currentDirUri,
@@ -396,10 +397,10 @@ async function currentMain(autorestArgs: Array<string>): Promise<number> {
  let fastMode = false;
  const tasks = new Array<Promise<void>>();

  const config = await api.view;
  const context = await api.view;

  api.GeneratedFile.Subscribe((_, artifact) => {
    if (config.HelpRequested) {
    if (context.config.help) {
      artifacts.push(artifact);
      return;
    }
@@ -419,12 +420,12 @@ async function currentMain(autorestArgs: Array<string>): Promise<number> {
  api.ClearFolder.Subscribe((_, folder) => clearFolders.add(folder));

  // maybe a resource schema batch process
  if (config["resource-schema-batch"]) {
  if (context.config["resource-schema-batch"]) {
    return resourceSchemaBatch(api);
  }
  fastMode = !!config["fast-mode"];
  fastMode = !!context.config["fast-mode"];

  if (config["batch"]) {
  if (context.config["batch"]) {
    await batch(api);
  } else {
    const result = await api.Process().finish;
@@ -433,7 +434,7 @@ async function currentMain(autorestArgs: Array<string>): Promise<number> {
    }
  }

  if (config.HelpRequested) {
  if (context.config.help) {
    // no fs operations on --help! Instead, format and print artifacts to console.
    // - print boilerplate help
    console.log("");
@@ -531,9 +532,9 @@ async function resourceSchemaBatch(api: AutoRest): Promise<number> {

  // ask for the view without
  const config = await api.RegenerateView();
  for (const batchConfig of config.GetNestedConfiguration("resource-schema-batch")) {
  for (const batchContext of config.getNestedConfiguration("resource-schema-batch")) {
    // really, there should be only one
    for (const eachFile of batchConfig["input-file"]) {
    for (const eachFile of batchContext.config["input-file"] ?? []) {
      const path = ResolveUri(config.configFileFolderUri, eachFile);
      const content = await ReadUri(path);
      if (!(await IsOpenApiDocument(content))) {
@@ -571,7 +572,7 @@ async function resourceSchemaBatch(api: AutoRest): Promise<number> {
      subscribeMessages(instance, () => exitcode++);

      // set configuration for that item
      instance.AddConfiguration(ShallowCopy(batchConfig, "input-file"));
      instance.AddConfiguration(ShallowCopy(batchContext, "input-file"));
      instance.AddConfiguration({ "input-file": eachFile });

      console.log(`Running autorest for *${path}* `);
@@ -8,7 +8,7 @@ export { Message, Channel } from "./lib/message";
export { Artifact } from "./lib/artifact";
export {
  AutoRest,
  ConfigurationView,
  AutorestContext,
  IdentifyDocument,
  IsConfigurationExtension,
  IsConfigurationDocument,
@@ -165,7 +165,7 @@ class Result {
    await this.resetConfiguration(this.service.settings.configuration);

    // get the list of files this is running on
    this.files = (await this.AutoRest.view).InputFileUris;
    this.files = (await this.AutoRest.view).config.inputFileUris;

    // start it up!
    const processResult = this.AutoRest.Process();
@@ -703,7 +703,7 @@ class OpenApiLanguageService extends TextDocuments implements IFileSystem {
    // is there a config file that contains the document as an input?
    for (const configFile of configFiles) {
      const a = new AutoRest(this, configFile);
      const inputs = (await a.view).InputFileUris;
      const inputs = (await a.view).config.inputFileUris;
      for (const input of inputs) {
        if (input === documentUri || decodeURIComponent(input) == decodeURIComponent(documentUri)) {
          return configFile;
@@ -7,7 +7,7 @@ import { isAbsolute } from "path";

import { ResolveUri, GetFilenameWithoutExtension } from "@azure-tools/uri";
import { DataSource } from "@azure-tools/datastore";
import { AutoRestRawConfiguration } from "./lib/configuration";
import { AutorestRawConfiguration } from "@autorest/configuration";

const regexLegacyArg = /^-[^-]/;
@@ -18,7 +18,7 @@ const regexLegacyArg = /^-[^-]/;
async function ParseCompositeSwagger(
  inputScope: DataSource,
  uri: string,
  targetConfig: AutoRestRawConfiguration,
  targetConfig: AutorestRawConfiguration,
): Promise<void> {
  const compositeSwaggerFile = await inputScope.ReadStrict(uri);
  const data = await compositeSwaggerFile.ReadObject<{ info: any; documents: Array<string> }>();
@@ -33,8 +33,8 @@ async function ParseCompositeSwagger(
  baseFolderUri: string,
  inputScope: DataSource,
  args: Array<string>,
): Promise<AutoRestRawConfiguration> {
  const result: AutoRestRawConfiguration = {
): Promise<AutorestRawConfiguration> {
  const result: AutorestRawConfiguration = {
    "input-file": [],
  };

@@ -3,12 +3,12 @@
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { ConfigurationLoader, ConfigurationView, MessageEmitter } from "./configuration";
import { ConfigurationLoader, AutorestContext, MessageEmitter } from "./configuration";
import { EventEmitter, IEvent } from "./events";
import { Exception } from "./exception";
import { IFileSystem, RealFileSystem } from "@azure-tools/datastore";
import { runPipeline } from "./pipeline/pipeline";
export { ConfigurationView } from "./configuration";
export { AutorestContext } from "./configuration";
import { homedir } from "os";
import { Artifact } from "./artifact";
import * as Constants from "./constants";
@@ -53,8 +53,8 @@ export class AutoRest extends EventEmitter {
  @EventEmitter.Event public Message!: IEvent<AutoRest, Message>;

  private _configurations = new Array<any>();
  private _view: ConfigurationView | undefined;
  public get view(): Promise<ConfigurationView> {
  private _view: AutorestContext | undefined;
  public get view(): Promise<AutorestContext> {
    return this._view ? Promise.resolve(this._view) : this.RegenerateView(true);
  }

@@ -69,7 +69,7 @@ export class AutoRest extends EventEmitter {
    process.env["autorest.home"] = process.env["autorest.home"] || homedir();
  }

  public async RegenerateView(includeDefault = false): Promise<ConfigurationView> {
  public async RegenerateView(includeDefault = false): Promise<AutorestContext> {
    this.Invalidate();
    const messageEmitter = new MessageEmitter();

@@ -110,7 +110,7 @@ export class AutoRest extends EventEmitter {
    let earlyCancel = false;
    let cancel: () => void = () => (earlyCancel = true);
    const processInternal = async () => {
      let view: ConfigurationView = <any>null;
      let view: AutorestContext = <any>null;
      try {
        // grab the current configuration view.
        view = await this.view;
@@ -125,14 +125,14 @@ export class AutoRest extends EventEmitter {
        view.messageEmitter.removeAllListeners();
      }
    };
    if (view.InputFileUris.length === 0) {
    if (view.config.inputFileUris.length === 0) {
      if (view.GetEntry("allow-no-input")) {
        this.Finished.Dispatch(true);
        return true;
      } else {
        // if this is using perform-load we don't need to require files.
        // if it's using batch, we might not have files in the main body
        if ((<any>view.Raw)["perform-load"] !== false) {
        if (view.config.raw["perform-load"] !== false) {
          return new Exception("No input files provided.\n\nUse --help to get help information.");
        }
      }
@@ -154,7 +154,7 @@ export class AutoRest extends EventEmitter {
        view.messageEmitter.removeAllListeners();
        return true;
      } catch (e) {
        const message = view.DebugMode
        const message = view.config.debug
          ? {
              Channel: Channel.Debug,
              Text: `Process() cancelled due to exception : ${e.message ? e.message : e} / ${e.stack ? e.stack : ""}`,
@@ -0,0 +1,127 @@
import { AutorestConfiguration, AutorestRawConfiguration, arrayOf } from "@autorest/configuration";
import { IFileSystem } from "@azure-tools/datastore";
import { CreateFileOrFolderUri, EnsureIsFolderUri, IsUri, ResolveUri } from "@azure-tools/uri";
import { cwd } from "process";
import { mergeConfigurations } from "./configuration-merging";

export const createAutorestConfiguration = async (
  configFileFolderUri: string,
  configurationFiles: { [key: string]: any },
  configs: AutorestRawConfiguration[],
  fileSystem: IFileSystem,
): Promise<AutorestConfiguration> => {
  const initialConfig: AutorestRawConfiguration = {
    "directive": [],
    "input-file": [],
    "exclude-file": [],
    "profile": [],
    "output-artifact": [],
    "require": [],
    "try-require": [],
    "use": [],
    "pass-thru": [],
  };

  const defaultConfig: AutorestRawConfiguration = {
    "base-folder": ".",
    "output-folder": "generated",
    "debug": false,
    "verbose": false,
    "disable-validation": false,
  };
  const rawConfig = mergeConfigurations(initialConfig, ...configs, defaultConfig);

  const config: AutorestConfiguration = createConfigFromRawConfig(configFileFolderUri, rawConfig, configurationFiles);

  const inputFiles = await Promise.all(
    arrayOf<string>(rawConfig["input-file"]).map((each) =>
      resolveAsPath(configFileFolderUri, config, each, fileSystem),
    ),
  );
  const filesToExclude = await Promise.all(
    arrayOf<string>(rawConfig["exclude-file"]).map((each) =>
      resolveAsPath(configFileFolderUri, config, each, fileSystem),
    ),
  );

  config.inputFileUris = inputFiles.filter((x) => !filesToExclude.includes(x));
  return config;
};

const createConfigFromRawConfig = (
  configFileFolderUri: string,
  rawConfig: AutorestRawConfiguration,
  configurationFiles: { [key: string]: string },
): AutorestConfiguration => {
  const baseFolderUri = getBaseFolderUri(configFileFolderUri, rawConfig);

  return {
    ...rawConfig,
    raw: rawConfig,
    configFileFolderUri: configFileFolderUri,
    inputFileUris: [],
    configurationFiles: configurationFiles,
    outputFolderUri: resolveAsWriteableFolder(baseFolderUri, <string>rawConfig["output-folder"]),
    help: Boolean(rawConfig.help),
    verbose: Boolean(rawConfig.verbose),
    cachingEnabled: Boolean(rawConfig.cache),
    cacheExclude: getCacheExclude(rawConfig),
    debug: Boolean(rawConfig.debug),
  };
};

const getCacheExclude = (config: AutorestRawConfiguration) => {
  const cache = config["cache"];
  return cache && cache.exclude ? arrayOf<string>(cache.exclude) : [];
};

export const extendAutorestConfiguration = (
  config: AutorestConfiguration,
  overrides: AutorestRawConfiguration[],
): AutorestConfiguration => {
  const rawConfig = mergeConfigurations(...overrides, config);
  const newConfig = createConfigFromRawConfig(config.configFileFolderUri, rawConfig, config.configurationFiles);
  newConfig.inputFileUris = config.inputFileUris;
  return newConfig;
};

export const resolveAsPath = (
  configFileFolderUri: string,
  config: AutorestConfiguration,
  path: string,
  fileSystem: IFileSystem,
): Promise<string> => {
  // is there even a potential for a parent folder from the input configuration
  const parentFolder = config.__parents?.[path];
  const fromBaseUri = ResolveUri(getBaseFolderUri(configFileFolderUri, config), path);

  // if it's an absolute uri already, give it back that way.
  if (IsUri(path) || !parentFolder) {
    return Promise.resolve(fromBaseUri);
  }

  // let it try relative to the file that loaded it.
  // if the relative-to-parent path isn't valid, we fall back to original behavior
  // where the file path is relative to the base uri.
  // (and we don't even check to see if that's valid, try-require wouldn't need valid files)
  const fromLoadedFile = ResolveUri(parentFolder, path);
  return fileSystem.ReadFile(fromLoadedFile).then(
    () => fromLoadedFile,
    () => fromBaseUri,
  );
};

export const getBaseFolderUri = (configFileFolderUri: string, config: AutorestRawConfiguration) =>
  EnsureIsFolderUri(ResolveUri(configFileFolderUri, <string>config["base-folder"]));

const resolveAsFolder = (baseFolderUri: string, path: string): string => {
  return EnsureIsFolderUri(ResolveUri(baseFolderUri, path));
};

const resolveAsWriteableFolder = (baseFolderUri: string, path: string): string => {
  // relative paths are relative to the local folder when the base-folder is remote.
  if (!baseFolderUri.startsWith("file:")) {
    return EnsureIsFolderUri(ResolveUri(CreateFileOrFolderUri(cwd() + "/"), path));
  }
  return resolveAsFolder(baseFolderUri, path);
};
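Illustrative only, not part of the diff: a hedged sketch of how the new helper could be called. RealFileSystem is imported elsewhere in this commit from @azure-tools/datastore; the folder URI and raw config values below are made up.

import { RealFileSystem } from "@azure-tools/datastore";
import { createAutorestConfiguration } from "./autorest-configuration";

async function example() {
  const fs = new RealFileSystem();
  const config = await createAutorestConfiguration(
    "file:///projects/my-spec/", // folder containing the config file (hypothetical)
    {},                          // configurationFiles map, empty here
    [{ "input-file": ["openapi.json"], "output-folder": "out" }], // raw configs, earlier entries win
    fs,
  );
  // input-file entries are resolved against base-folder and exclude-file is applied.
  console.log(config.inputFileUris, config.outputFolderUri);
}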
@@ -8,126 +8,61 @@ import {
  TryDecodeEnhancedPositionFromName,
} from "@azure-tools/datastore";
import { clone, values } from "@azure-tools/linq";
import { EnsureIsFolderUri, ResolveUri, IsUri, FileUriToPath, CreateFileOrFolderUri } from "@azure-tools/uri";
import { EnsureIsFolderUri, ResolveUri, IsUri, FileUriToPath } from "@azure-tools/uri";
import { From } from "linq-es2015";
import { basename, dirname } from "path";
import { CancellationToken, CancellationTokenSource } from "vscode-jsonrpc";
import { Artifact } from "../artifact";
import { Channel, Message, Range, SourceLocation } from "../message";
import { Suppressor } from "../pipeline/suppression";
import { resolveRValue } from "../source-map/merging";
import { cwd } from "process";
import { Directive, ResolvedDirective } from "./directive";
import { AutoRestRawConfiguration, mergeConfiguration, mergeConfigurations } from "./auto-rest-raw-configuration";
import { arrayOf, valuesOf } from "./utils";
import { Directive, ResolvedDirective } from "@autorest/configuration";
import { CachingFileSystem } from "./caching-file-system";
import { MessageEmitter } from "./message-emitter";
import { IEvent } from "../events";

const RESOLVE_MACROS_AT_RUNTIME = true;
import { createAutorestConfiguration, extendAutorestConfiguration } from "./autorest-configuration";
import { AutorestConfiguration, AutorestRawConfiguration, arrayOf } from "@autorest/configuration";

const safeEval = createSandbox();

function ProxifyConfigurationView(cfgView: any) {
  return new Proxy(cfgView, {
    get: (target, property) => {
      const value = target[property];
      if (value && value instanceof Array) {
        return value.map((each) => resolveRValue(each, "", target, null));
      }
      return resolveRValue(value, <string>property, cfgView, null);
    },
  });
}
export const createAutorestContext = async (
  configurationFiles: { [key: string]: any },
  fileSystem: IFileSystem,
  messageEmitter: MessageEmitter,
  configFileFolderUri: string,
  ...configs: AutorestRawConfiguration[]
): Promise<AutorestContext> => {
  const cachingFs = fileSystem instanceof CachingFileSystem ? fileSystem : new CachingFileSystem(fileSystem);
  const config = await createAutorestConfiguration(configFileFolderUri, configurationFiles, configs, cachingFs);
  return new AutorestContext(config, cachingFs, messageEmitter, configFileFolderUri);
};

export class ConfigurationView {
  [name: string]: any;
  public InputFileUris = new Array<string>();
  public fileSystem: CachingFileSystem;
export class AutorestContext {
  public config: AutorestConfiguration;

  private suppressor: Suppressor;

  public constructor(
    public configurationFiles: { [key: string]: any },
    fileSystem: IFileSystem,
    config: AutorestConfiguration,
    public fileSystem: CachingFileSystem,
    public messageEmitter: MessageEmitter,
    public configFileFolderUri: string,
    ...configs: Array<AutoRestRawConfiguration> // decreasing priority
  ) {
    // wrap the filesystem with the caching filesystem
    this.fileSystem = fileSystem instanceof CachingFileSystem ? fileSystem : new CachingFileSystem(fileSystem);

    // TODO: fix configuration loading, note that there was no point in passing that DataStore used
    // for loading in here as all connection to the sources is lost when passing `Array<AutoRestConfigurationImpl>` instead of `DataHandleRead`s...
    // theoretically the `ValuesOf` approach and such won't support blaming (who to blame if $.directives[3] sucks? which code block was it from)
    // long term, we simply gotta write a `Merge` method that adheres to the rules we need in here.
    this.rawConfig = <any>{
      "directive": [],
      "input-file": [],
      "exclude-file": [],
      "profile": [],
      "output-artifact": [],
      "require": [],
      "try-require": [],
      "use": [],
      "pass-thru": [],
    };

    this.rawConfig = mergeConfigurations(this.rawConfig, ...configs);

    // default values that are the least priority.
    // TODO: why is this here and not in default-configuration?
    this.rawConfig = mergeConfiguration(this.rawConfig, <any>{
      "base-folder": ".",
      "output-folder": "generated",
      "debug": false,
      "verbose": false,
      "disable-validation": false,
    });

    if (RESOLVE_MACROS_AT_RUNTIME) {
      // if RESOLVE_MACROS_AT_RUNTIME is set
      // this will insert a Proxy object in most of the uses of
      // the configuration, and will do a macro resolution when the
      // value is retrieved.

      // I have turned on this behavior by default. I'm not sure that
      // I need it at this point, but I'm leaving this code here since
      // It's possible that I do.
      this.config = ProxifyConfigurationView(this.rawConfig);
    } else {
      this.config = this.rawConfig;
    }
    this.config = config;
    this.suppressor = new Suppressor(this);

    // treat this as a configuration property too.
    (<any>this.rawConfig).configurationFiles = configurationFiles;
  }

  async init() {
    // after the view is created, we want to be able to do any last-minute
    // initialization (like, make sure input-file uris are actually resolved)
    const inputFiles = await Promise.all(
      arrayOf<string>(this.config["input-file"]).map((each) => this.ResolveAsPath(each)),
    );
    const filesToExclude = await Promise.all(
      arrayOf<string>(this.config["exclude-file"]).map((each) => this.ResolveAsPath(each)),
    );

    this.InputFileUris = inputFiles.filter((x) => !filesToExclude.includes(x));

    return this;
  /**
   * @deprecated Use .config.raw instead. Keeping this for backward compatibility in the `autorest` module.
   */
  public get rawConfig() {
    return this.config.raw;
  }

  public get Keys(): Array<string> {
    return Object.getOwnPropertyNames(this.config);
  }

  /* @internal */ public updateConfigurationFile(filename: string, content: string) {
  public updateConfigurationFile(filename: string, content: string) {
    // only name itself is allowed here, no path
    filename = basename(filename);

    const keys = Object.getOwnPropertyNames(this.configurationFiles);
    const keys = Object.getOwnPropertyNames(this.config.configurationFiles);

    if (keys && keys.length > 0) {
      const path = dirname(keys[0]);
@@ -148,14 +83,6 @@ export class ConfigurationView {
    }
  }

  /* @internal */ public get Indexer(): ConfigurationView {
    return new Proxy<ConfigurationView>(this, {
      get: (target, property) => {
        return property in target.config ? (<any>target.config)[property] : this[<number | string>property];
      },
    });
  }

  /* @internal */ public get DataStore(): DataStore {
    return this.messageEmitter.DataStore;
  }
@@ -172,21 +99,7 @@ export class ConfigurationView {
    return this.messageEmitter.ClearFolder;
  }

  private config: AutoRestRawConfiguration;
  private rawConfig: AutoRestRawConfiguration;

  private ResolveAsFolder(path: string): string {
    return EnsureIsFolderUri(ResolveUri(this.BaseFolderUri, path));
  }
  private ResolveAsWriteableFolder(path: string): string {
    // relative paths are relative to the local folder when the base-folder is remote.
    if (!this.BaseFolderUri.startsWith("file:")) {
      return EnsureIsFolderUri(ResolveUri(CreateFileOrFolderUri(cwd() + "/"), path));
    }
    return this.ResolveAsFolder(path);
  }

  private ResolveAsPath(path: string): Promise<string> {
  public ResolveAsPath(path: string): Promise<string> {
    // is there even a potential for a parent folder from the input configuration
    const parentFolder = this.config?.__parents?.[path];
    const fromBaseUri = ResolveUri(this.BaseFolderUri, path);
@@ -214,7 +127,7 @@ export class ConfigurationView {
  // public methods

  public get UseExtensions(): Array<{ name: string; source: string; fullyQualified: string }> {
    const useExtensions = this.Indexer["use-extension"] || {};
    const useExtensions = this.config["use-extension"] || {};
    return Object.keys(useExtensions).map((name) => {
      const source = useExtensions[name].startsWith("file://")
        ? FileUriToPath(useExtensions[name])
@@ -227,65 +140,9 @@ export class ConfigurationView {
    });
  }

  public static async *getIncludedConfigurationFiles(
    configView: () => Promise<ConfigurationView>,
    fileSystem: IFileSystem,
    ignoreFiles: Set<string>,
  ) {
    let done = false;

    while (!done) {
      // get a fresh copy of the view every time we start the loop.
      const view = await configView();

      // if we make it thru the list, we're done.
      done = true;
      for (const each of valuesOf<string>(view.config["require"])) {
        if (ignoreFiles.has(each)) {
          continue;
        }

        // looks like we found one that we haven't handled yet.
        done = false;
        ignoreFiles.add(each);
        yield await view.ResolveAsPath(each);
        break;
      }
    }

    done = false;
    while (!done) {
      // get a fresh copy of the view every time we start the loop.
      const view = await configView();

      // if we make it thru the list, we're done.
      done = true;
      for (const each of valuesOf<string>(view.config["try-require"])) {
        if (ignoreFiles.has(each)) {
          continue;
        }

        // looks like we found one that we haven't handled yet.
        done = false;
        ignoreFiles.add(each);
        const path = await view.ResolveAsPath(each);
        try {
          if (await fileSystem.ReadFile(path)) {
            yield path;
          }
        } catch {
          // do nothing
        }

        break;
      }
    }
  }

  public resolveDirectives(predicate?: (each: ResolvedDirective) => boolean) {
  public resolveDirectives(predicate?: (each: ResolvedDirective) => boolean): ResolvedDirective[] {
    // optionally filter by predicate.
    const plainDirectives = values(valuesOf<Directive>(this.config["directive"]));
    // predicate ? values(valuesOf<Directive>(this.config['directive'])).where(predicate) : values(valuesOf<Directive>(this.config['directive']));
    const plainDirectives = values(arrayOf<Directive>(this.config["directive"]));

    const declarations = this.config["declare-directive"] || {};
    const expandDirective = (dir: Directive): Iterable<Directive> => {
@@ -325,15 +182,11 @@ export class ConfigurationView {
    // return From(plainDirectives).SelectMany(expandDirective).Select(each => new StaticDirectiveView(each)).ToArray();
  }

  public get OutputFolderUri(): string {
    return this.ResolveAsWriteableFolder(<string>this.config["output-folder"]);
  }

  public get HeaderText(): string {
    const h = this.rawConfig["header-definitions"];
    const h = this.config["header-definitions"];
    const version = (<any>global).autorestVersion;

    switch (this.rawConfig["license-header"]?.toLowerCase()) {
    switch (this.config["license-header"]?.toLowerCase()) {
      case "microsoft_mit":
        return `${h.microsoft}\n${h.mit}\n${h.default.replace("{core}", version)}\n${h.warning}`;

@@ -363,96 +216,67 @@ export class ConfigurationView {
        return `${h.default.replace("{core}", version)}\n${h.warning}`;

      default:
        return `${this.rawConfig["license-header"]}`;
        return `${this.config["license-header"]}`;
    }
  }

  public IsOutputArtifactRequested(artifact: string): boolean {
    return From(valuesOf<string>(this.config["output-artifact"])).Contains(artifact);
    return From(arrayOf<string>(this.config["output-artifact"])).Contains(artifact);
  }

  /**
   * Returns the config value at the given path.
   * @param key Path to the config;
   */
  public GetEntry(key: string): any {
    if (!key) {
      return clone(this.config);
    }

    if (key === "resolved-directive") {
      return this.resolveDirectives();
    }
    if (<any>key === "header-text") {
      return this.HeaderText;
    }
    let result = <any>this.config;

    let result = this.config;
    for (const keyPart of key.split(".")) {
      result = result[keyPart];
    }
    return result;
  }

  public get Raw(): AutoRestRawConfiguration {
    return this.config;
  }

  public get DebugMode(): boolean {
    return !!this.config["debug"];
  }

  public get CacheMode(): boolean {
    return !!this.config["cache"];
  }

  public get CacheExclude(): Array<string> {
    const cache = this.config["cache"];
    if (cache && cache.exclude) {
      return [...valuesOf<string>(cache.exclude)];
    }
    return [];
  }

  public get VerboseMode(): boolean {
    return !!this.config["verbose"];
  }

  public get HelpRequested(): boolean {
    return !!this.config["help"];
  }

  public *GetNestedConfiguration(pluginName: string): Iterable<ConfigurationView> {
  public *getNestedConfiguration(pluginName: string): Iterable<AutorestContext> {
    const pp = pluginName.split(".");
    if (pp.length > 1) {
      const n = this.GetNestedConfiguration(pp[0]);
      const n = this.getNestedConfiguration(pp[0]);
      for (const s of n) {
        yield* s.GetNestedConfiguration(pp.slice(1).join("."));
        yield* s.getNestedConfiguration(pp.slice(1).join("."));
      }
      return;
    }

    for (const section of valuesOf<any>((<any>this.config)[pluginName])) {
    for (const section of arrayOf<any>(this.config.raw[pluginName])) {
      if (section) {
        yield this.GetNestedConfigurationImmediate(section === true ? {} : section);
        yield this.extendWith(section === true ? {} : section);
      }
    }
  }

  public GetNestedConfigurationImmediate(...scope: Array<any>): ConfigurationView {
    const c = new ConfigurationView(
      this.configurationFiles,
      this.fileSystem,
      this.messageEmitter,
      this.configFileFolderUri,
      ...scope,
      this.config,
    );
    c.InputFileUris = this.InputFileUris;
    return c.Indexer;
  /**
   * Returns a new Autorest context with the configuration extended with the provided configurations.
   * @param overrides List of configs to override
   */
  public extendWith(...overrides: AutorestRawConfiguration[]): AutorestContext {
    const nestedConfig = extendAutorestConfiguration(this.config, overrides);
    return new AutorestContext(nestedConfig, this.fileSystem, this.messageEmitter, this.configFileFolderUri);
  }

  // message pipeline (source map resolution, filter, ...)
  public async Message(m: Message): Promise<void> {
    if (m.Channel === Channel.Debug && !this.DebugMode) {
    if (m.Channel === Channel.Debug && !this.config.debug) {
      return;
    }

    if (m.Channel === Channel.Verbose && !this.VerboseMode) {
    if (m.Channel === Channel.Verbose && !this.config.verbose) {
      return;
    }
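Illustrative only, not part of the diff: a hedged sketch of how callers move from the old ConfigurationView surface to the renamed AutorestContext, given an existing context instance ("csharp" is just an example plugin name).

import { AutorestContext } from "./autorest-context";

function inspect(context: AutorestContext): void {
  // resolved values now live on context.config instead of proxied getters
  const verbose = context.config.verbose;
  const inputs = context.config.inputFileUris;

  // per-plugin scopes: getNestedConfiguration replaces GetNestedConfiguration
  for (const nested of context.getNestedConfiguration("csharp")) {
    console.log(nested.config.outputFolderUri);
  }

  // extendWith replaces GetNestedConfigurationImmediate for ad-hoc overrides
  const overridden = context.extendWith({ "output-folder": "custom" });
  console.log(verbose, inputs.length, overridden.config.outputFolderUri);
}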
@@ -14,12 +14,12 @@ import { Channel, SourceLocation } from "../message";
import { parseCodeBlocks } from "../parsing/literate-yaml";
import { AutoRestExtension } from "../pipeline/plugin-endpoint";
import { AppRoot } from "../constants";
import { AutoRestRawConfiguration } from "./auto-rest-raw-configuration";
import { arrayOf } from "./utils";
import { ConfigurationView } from "./configuration-view";
import { AutorestRawConfiguration, arrayOf } from "@autorest/configuration";
import { AutorestContext, createAutorestContext } from "./autorest-context";
import { CachingFileSystem } from "./caching-file-system";
import { MessageEmitter } from "./message-emitter";
import { detectConfigurationFile } from "./configuration-file-resolver";
import { getIncludedConfigurationFiles } from "./loading-utils";

// eslint-disable-next-line @typescript-eslint/no-var-requires
const untildify: (path: string) => string = require("untildify");
@@ -48,8 +48,8 @@ export class ConfigurationLoader {

  private async parseCodeBlocks(
    configFile: DataHandle,
    contextConfig: ConfigurationView,
  ): Promise<Array<AutoRestRawConfiguration>> {
    contextConfig: AutorestContext,
  ): Promise<Array<AutorestRawConfiguration>> {
    const parentFolder = ParentFolderUri(configFile.originalFullPath);

    // load config
@@ -63,7 +63,7 @@ export class ConfigurationLoader {
      hConfig
        .filter((each) => each)
        .map((each) => {
          const pBlock = each.data.ReadObject<AutoRestRawConfiguration>();
          const pBlock = each.data.ReadObject<AutorestRawConfiguration>();
          return pBlock.then((block) => {
            if (!block) {
              block = {};
@@ -201,7 +201,7 @@ export class ConfigurationLoader {
    messageEmitter: MessageEmitter,
    includeDefault: boolean,
    ...configs: Array<any>
  ): Promise<ConfigurationView> {
  ): Promise<AutorestContext> {
    const configFileUri =
      this.fileSystem && this.configFileOrFolderUri
        ? await detectConfigurationFile(this.fileSystem, this.configFileOrFolderUri, messageEmitter)
@@ -211,18 +211,19 @@ export class ConfigurationLoader {
        : this.configFileOrFolderUri || "file:///";

    const configurationFiles: { [key: string]: any } = {};
    const configSegments: Array<any> = [];
    const secondPass: Array<any> = [];
    const configSegments: AutorestRawConfiguration[] = [];
    const secondPass: AutorestRawConfiguration[] = [];

    const createView = (segments: Array<any> = configSegments) => {
      return new ConfigurationView(
      return createAutorestContext(
        configurationFiles,
        this.fileSystem,
        messageEmitter,
        configFileFolderUri,
        ...segments,
      ).init();
      );
    };

    const addSegments = async (configs: Array<any>, keepInSecondPass = true): Promise<Array<any>> => {
      const segs = await this.desugarRawConfigs(configs);
      configSegments.push(...segs);
@@ -231,6 +232,7 @@ export class ConfigurationLoader {
      }
      return segs;
    };

    const fsInputView = messageEmitter.DataStore.GetReadThroughScope(this.fileSystem);

    // 1. overrides (CLI, ...)
@@ -251,11 +253,7 @@ export class ConfigurationLoader {
    // 3. resolve 'require'd configuration
    const addedConfigs = new Set<string>();
    const includeFn = async (fsToUse: IFileSystem) => {
      for await (let additionalConfig of ConfigurationView.getIncludedConfigurationFiles(
        createView,
        fsToUse,
        addedConfigs,
      )) {
      for await (let additionalConfig of getIncludedConfigurationFiles(createView, fsToUse, addedConfigs)) {
        // acquire additional configs
        try {
          additionalConfig = simplifyUri(additionalConfig);
@@ -307,9 +305,9 @@ export class ConfigurationLoader {
    const addedExtensions = new Set<string>();

    const resolveExtensions = async () => {
      const viewsToHandle: Array<ConfigurationView> = [await createView()];
      const viewsToHandle: Array<AutorestContext> = [await createView()];
      while (viewsToHandle.length > 0) {
        const tmpView = <ConfigurationView>viewsToHandle.pop();
        const tmpView = <AutorestContext>viewsToHandle.pop();
        const additionalExtensions = tmpView.UseExtensions.filter((ext) => !addedExtensions.has(ext.fullyQualified));
        await addSegments([{ "used-extension": tmpView.UseExtensions.map((x) => x.fullyQualified) }]);
        if (additionalExtensions.length === 0) {
@@ -337,7 +335,7 @@ export class ConfigurationLoader {

        // trim off the '@org' and 'autorest.' from the name.
        const shortname = additionalExtension.name.split("/").last.replace(/^autorest\./gi, "");
        const view = [...(await createView()).GetNestedConfiguration(shortname)];
        const view = [...(await createView()).getNestedConfiguration(shortname)];
        const enableDebugger = view.length > 0 ? <boolean>view[0].GetEntry("debugger") : false;

        // Add a hint here to make legacy users aware that the default version has been bumped to 3.0+.
@@ -480,12 +478,12 @@ export class ConfigurationLoader {
      await includeFn(this.fileSystem);
      await resolveExtensions();

      return (await createView([...configs, ...blocks, ...secondPass])).Indexer;
      return await createView([...configs, ...blocks, ...secondPass]);
    }

    await resolveExtensions();

    // return the final view
    return (await createView()).Indexer;
    return await createView();
  }
}
@@ -0,0 +1,36 @@
import { evaluateGuard } from "../parsing/literate-yaml";
import { MergeOverwriteOrAppend } from "../source-map/merging";
import { AutorestRawConfiguration } from "@autorest/configuration";

export const mergeConfigurations = (...configs: Array<AutorestRawConfiguration>): AutorestRawConfiguration => {
  let result: AutorestRawConfiguration = {};
  configs = configs
    .map((each, i, a) => ({ ...each, "load-priority": each["load-priority"] || -i }))
    .sort((a, b) => b["load-priority"] - a["load-priority"]);
  // if they say --profile: or --api-version: (or in config) then we force it to set the tag=all-api-versions
  // Some of the rest specs had a default tag set (really shouldn't have done that), which ... was problematic,
  // so this enables us to override that in the case they are asking for filtering to a profile or an api-version

  const forceAllVersionsMode = !!configs.find((each) => each["api-version"]?.length || each.profile?.length || 0 > 0);
  for (const config of configs) {
    result = mergeConfiguration(result, config, forceAllVersionsMode);
  }
  result["load-priority"] = undefined;
  return result;
};

// TODO: operate on DataHandleRead and create source map!
export const mergeConfiguration = (
  higherPriority: AutorestRawConfiguration,
  lowerPriority: AutorestRawConfiguration,
  forceAllVersionsMode = false,
): AutorestRawConfiguration => {
  // check guard
  if (lowerPriority.__info && !evaluateGuard(lowerPriority.__info, higherPriority, forceAllVersionsMode)) {
    // guard false? => skip
    return higherPriority;
  }

  // merge
  return MergeOverwriteOrAppend(higherPriority, lowerPriority);
};
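Illustrative only, not part of the diff: the merge is ordered by "load-priority", which defaults to the argument position, so earlier arguments win on conflicting scalar keys (assuming MergeOverwriteOrAppend keeps the higher-priority value, as its name suggests). The config values below are made up.

import { mergeConfigurations } from "./configuration-merging";

const merged = mergeConfigurations(
  { "output-folder": "from-cli", "verbose": true },              // highest priority
  { "output-folder": "from-readme", "input-file": ["a.json"] },  // e.g. from a readme.md code block
  { "output-folder": "generated" },                              // defaults, lowest priority
);
// merged["output-folder"] === "from-cli"; "input-file" and "verbose" both survive the merge.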
@@ -1,6 +1,6 @@
export * from "./configuration-loader";
export * from "./auto-rest-raw-configuration";
export * from "./configuration-view";
export * from "./configuration-merging";
export * from "./autorest-configuration";
export * from "./autorest-context";
export * from "./message-emitter";
export * from "./directive";
export * from "./configuration-file-resolver";
@@ -0,0 +1,58 @@
import { arrayOf } from "@autorest/configuration";
import { IFileSystem } from "@azure-tools/datastore";
import { AutorestContext } from "./autorest-context";

export async function* getIncludedConfigurationFiles(
  configView: () => Promise<AutorestContext>,
  fileSystem: IFileSystem,
  ignoreFiles: Set<string>,
) {
  let done = false;

  while (!done) {
    // get a fresh copy of the view every time we start the loop.
    const view = await configView();

    // if we make it thru the list, we're done.
    done = true;
    for (const each of arrayOf<string>(view.config["require"])) {
      if (ignoreFiles.has(each)) {
        continue;
      }

      // looks like we found one that we haven't handled yet.
      done = false;
      ignoreFiles.add(each);
      yield await view.ResolveAsPath(each);
      break;
    }
  }

  done = false;
  while (!done) {
    // get a fresh copy of the view every time we start the loop.
    const view = await configView();

    // if we make it thru the list, we're done.
    done = true;
    for (const each of arrayOf<string>(view.config["try-require"])) {
      if (ignoreFiles.has(each)) {
        continue;
      }

      // looks like we found one that we haven't handled yet.
      done = false;
      ignoreFiles.add(each);
      const path = await view.ResolveAsPath(each);
      try {
        if (await fileSystem.ReadFile(path)) {
          yield path;
        }
      } catch {
        // do nothing
      }

      break;
    }
  }
}
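Illustrative only, not part of the diff: the generator is meant to be consumed with for await, re-reading the view after each yielded file so newly required configuration can pull in further files. The loadRequiredConfigs wrapper below is a hypothetical caller; RealFileSystem comes from @azure-tools/datastore.

import { RealFileSystem } from "@azure-tools/datastore";
import { getIncludedConfigurationFiles } from "./loading-utils";
import { AutorestContext } from "./autorest-context";

async function loadRequiredConfigs(createView: () => Promise<AutorestContext>) {
  const fs = new RealFileSystem();
  const seen = new Set<string>(); // the generator adds each yielded uri to this set
  for await (const uri of getIncludedConfigurationFiles(createView, fs, seen)) {
    console.log(`require'd configuration: ${uri}`);
    // the caller would load this file and feed it back into the configuration here
  }
}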
@@ -1,52 +0,0 @@
export function isIterable(target: any): target is Iterable<any> {
  return !!target && typeof target[Symbol.iterator] === "function";
}

export function* valuesOf<T>(value: any): Iterable<T> {
  switch (typeof value) {
    case "string":
      yield <T>(<any>value);
      break;

    case "object":
      if (value) {
        if (isIterable(value)) {
          yield* value;
        } else {
          yield value;
        }
        return;
      }
      break;

    default:
      if (value) {
        yield value;
      }
  }
  /* rewrite
  if (value === undefined) {
    return [];
  }
  if (value instanceof Array) {
    return value;
  }
  return [value];
  */
}

export function arrayOf<T>(value: any): Array<T> {
  if (value === undefined) {
    return [];
  }
  switch (typeof value) {
    case "string":
      return [<T>(<any>value)];
    case "object":
      if (isIterable(value)) {
        return [...value];
      }
      break;
  }
  return [<T>value];
}
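Illustrative only, not part of the diff: the arrayOf helper that replaces these deleted local utilities (now imported from @autorest/configuration elsewhere in this commit) normalizes scalar-or-array config values, matching the behavior shown above.

import { arrayOf } from "@autorest/configuration";

const none = arrayOf<string>(undefined);            // []
const single = arrayOf<string>("readme.md");        // ["readme.md"]
const many = arrayOf<string>(["a.json", "b.json"]); // ["a.json", "b.json"]
console.log(none, single, many);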
@@ -11,7 +11,7 @@ import {
  createSandbox,
  StrictJsonSyntaxCheck,
} from "@azure-tools/datastore";
import { ConfigurationView } from "../autorest-core";
import { AutorestContext } from "../autorest-core";
import { OperationAbortedException } from "../exception";
import { Channel, SourceLocation } from "../message";
import { MergeYamls, resolveRValue } from "../source-map/merging";
@ -29,26 +29,26 @@ function tryMarkdown(rawMarkdownOrYaml: string): boolean {
|
|||
return /^#/gm.test(rawMarkdownOrYaml);
|
||||
}
|
||||
|
||||
export async function parse(config: ConfigurationView, literate: DataHandle, sink: DataSink): Promise<DataHandle> {
|
||||
export async function parse(config: AutorestContext, literate: DataHandle, sink: DataSink): Promise<DataHandle> {
|
||||
return parseInternal(config, literate, sink);
|
||||
}
|
||||
|
||||
export async function parseCodeBlocks(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
literate: DataHandle,
|
||||
sink: DataSink,
|
||||
): Promise<Array<CodeBlock>> {
|
||||
return parseCodeBlocksInternal(config, literate, sink);
|
||||
}
|
||||
|
||||
async function parseInternal(config: ConfigurationView, hLiterate: DataHandle, sink: DataSink): Promise<DataHandle> {
|
||||
async function parseInternal(config: AutorestContext, hLiterate: DataHandle, sink: DataSink): Promise<DataHandle> {
|
||||
// merge the parsed codeblocks
|
||||
const blocks = (await parseCodeBlocksInternal(config, hLiterate, sink)).map((each) => each.data);
|
||||
return MergeYamls(config, blocks, sink);
|
||||
}
|
||||
|
||||
async function parseCodeBlocksInternal(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
hLiterate: DataHandle,
|
||||
sink: DataSink,
|
||||
): Promise<Array<CodeBlock>> {
|
||||
|
@ -119,7 +119,7 @@ export function evaluateGuard(rawFenceGuard: string, contextObject: any, forceAl
|
|||
isLoaded: (name: string) => {
|
||||
return (
|
||||
contextObject["used-extension"] &&
|
||||
!!contextObject["used-extension"].find((each) => each.startsWith(`["${name}"`))
|
||||
!!contextObject["used-extension"].find((each: any) => each.startsWith(`["${name}"`))
|
||||
);
|
||||
},
|
||||
|
||||
|
|
|
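To make the isLoaded guard tweak above concrete, a small sketch of how it matches used-extension entries. The entry format shown (a stringified name/source tuple) is an assumption for illustration only.

// Sketch of the isLoaded() guard helper; the entry shape is assumed, not taken from the commit.
const isLoaded = (contextObject: any, name: string): boolean =>
  !!contextObject["used-extension"] &&
  !!contextObject["used-extension"].find((each: string) => each.startsWith(`["${name}"`));

const ctx = { "used-extension": ['["@autorest/modelerfour","4.15.456"]'] };
console.log(isLoaded(ctx, "@autorest/modelerfour")); // true
console.log(isLoaded(ctx, "@autorest/python"));      // false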
@ -4,14 +4,14 @@
|
|||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { DataHandle, DataSink, DataSource, QuickDataSource } from "@azure-tools/datastore";
|
||||
import { ConfigurationView } from "../configuration";
|
||||
import { AutorestContext } from "../configuration";
|
||||
import { length } from "@azure-tools/linq";
|
||||
|
||||
export type PipelinePlugin = (config: ConfigurationView, input: DataSource, sink: DataSink) => Promise<DataSource>;
|
||||
export type PipelinePlugin = (config: AutorestContext, input: DataSource, sink: DataSink) => Promise<DataSource>;
|
||||
|
||||
/** @internal */
|
||||
export function createPerFilePlugin(
|
||||
processorBuilder: (config: ConfigurationView) => Promise<(input: DataHandle, sink: DataSink) => Promise<DataHandle>>,
|
||||
processorBuilder: (config: AutorestContext) => Promise<(input: DataHandle, sink: DataSink) => Promise<DataHandle>>,
|
||||
): PipelinePlugin {
|
||||
return async (config, input, sink) => {
|
||||
const processor = await processorBuilder(config);
|
||||
|
|
|
@ -22,8 +22,8 @@ function decorateSpecialProperties(o: any): void {
|
|||
export function createComponentModifierPlugin(): PipelinePlugin {
|
||||
const noWireExtension = "x-ms-no-wire";
|
||||
|
||||
return createPerFilePlugin(async (config) => async (fileIn, sink) => {
|
||||
const componentModifier = Clone((<any>config.Raw).components);
|
||||
return createPerFilePlugin(async (context) => async (fileIn, sink) => {
|
||||
const componentModifier = Clone(context.config.raw.components);
|
||||
if (componentModifier) {
|
||||
const o = await fileIn.ReadObject<any>();
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@ import {
|
|||
PipeState,
|
||||
mergePipeStates,
|
||||
} from "@azure-tools/datastore";
|
||||
import { ConfigurationView, getExtension } from "../configuration";
|
||||
import { AutorestContext, getExtension } from "../configuration";
|
||||
import { Channel } from "../message";
|
||||
import { OutstandingTaskAwaiter } from "../outstanding-task-awaiter";
|
||||
import { PipelinePlugin } from "./common";
|
||||
|
@ -71,11 +71,11 @@ interface PipelineNode {
|
|||
}
|
||||
|
||||
function buildPipeline(
|
||||
config: ConfigurationView,
|
||||
): { pipeline: { [name: string]: PipelineNode }; configs: { [jsonPath: string]: ConfigurationView } } {
|
||||
const cfgPipeline = config.GetEntry(<any>"pipeline");
|
||||
context: AutorestContext,
|
||||
): { pipeline: { [name: string]: PipelineNode }; configs: { [jsonPath: string]: AutorestContext } } {
|
||||
const cfgPipeline = context.GetEntry("pipeline");
|
||||
const pipeline: { [name: string]: PipelineNode } = {};
|
||||
const configCache: { [jsonPath: string]: ConfigurationView } = {};
|
||||
const configCache: { [jsonPath: string]: AutorestContext } = {};
|
||||
|
||||
// Resolves a pipeline stage name using the current stage's name and the relative name.
|
||||
// It considers the actually existing pipeline stages.
|
||||
|
@ -148,7 +148,7 @@ function buildPipeline(
|
|||
) => {
|
||||
if (inputNodes.length === 0) {
|
||||
const config = configCache[stringify(configScope)];
|
||||
const configs = scope ? [...config.GetNestedConfiguration(scope)] : [config];
|
||||
const configs = scope ? [...config.getNestedConfiguration(scope)] : [config];
|
||||
for (let i = 0; i < configs.length; ++i) {
|
||||
const newSuffix = configs.length === 1 ? "" : "/" + i;
|
||||
suffixes.push(suffix + newSuffix);
|
||||
|
@ -184,7 +184,7 @@ function buildPipeline(
|
|||
}
|
||||
};
|
||||
|
||||
configCache[stringify([])] = config;
|
||||
configCache[stringify([])] = context;
|
||||
addNodesAndSuffixes("", [], [], inputs.map(createNodesAndSuffixes));
|
||||
|
||||
return { name: stageName, suffixes: (cfg.suffixes = suffixes) };
|
||||
|
@ -212,7 +212,7 @@ function isDrainRequired(p: PipelineNode) {
|
|||
return false;
|
||||
}
|
||||
|
||||
export async function runPipeline(configView: ConfigurationView, fileSystem: IFileSystem): Promise<void> {
|
||||
export async function runPipeline(configView: AutorestContext, fileSystem: IFileSystem): Promise<void> {
|
||||
// built-in plugins
|
||||
const plugins: { [name: string]: PipelinePlugin } = {
|
||||
"help": createHelpPlugin(),
|
||||
|
@ -249,7 +249,7 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
new QuickDataSource([
|
||||
await configView.DataStore.getDataSink().WriteObject(
|
||||
"configuration",
|
||||
configView.Raw,
|
||||
configView.config.raw,
|
||||
["fix-me-4"],
|
||||
"configuration",
|
||||
),
|
||||
|
@ -271,7 +271,7 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
|
||||
// dynamically loaded, auto-discovered plugins
|
||||
const __extensionExtension: { [pluginName: string]: AutoRestExtension } = {};
|
||||
for (const useExtensionQualifiedName of configView.GetEntry(<any>"used-extension") || []) {
|
||||
for (const useExtensionQualifiedName of configView.GetEntry("used-extension") || []) {
|
||||
const extension = await getExtension(useExtensionQualifiedName);
|
||||
for (const plugin of await extension.GetPluginNames(configView.CancellationToken)) {
|
||||
if (!plugins[plugin]) {
|
||||
|
@ -283,7 +283,7 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
|
||||
// __status scope
|
||||
const startTime = Date.now();
|
||||
(<any>configView.Raw).__status = new Proxy<any>(
|
||||
configView.config.raw.__status = new Proxy<any>(
|
||||
{},
|
||||
{
|
||||
get(_, key) {
|
||||
|
@ -316,12 +316,11 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
|
||||
const fsInput = configView.DataStore.GetReadThroughScope(fileSystem);
|
||||
const pipeline = buildPipeline(configView);
|
||||
const times = !!configView["timestamp"];
|
||||
const times = !!configView.config["timestamp"];
|
||||
const tasks: { [name: string]: Promise<DataSource> } = {};
|
||||
|
||||
const ScheduleNode: (nodeName: string) => Promise<DataSource> = async (nodeName) => {
|
||||
const node = pipeline.pipeline[nodeName];
|
||||
|
||||
if (!node) {
|
||||
throw new Error(`Cannot find pipeline node ${nodeName}.`);
|
||||
}
|
||||
|
@ -355,16 +354,16 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
break;
|
||||
}
|
||||
|
||||
const config = pipeline.configs[stringify(node.configScope)];
|
||||
const context = pipeline.configs[stringify(node.configScope)];
|
||||
const pluginName = node.pluginName;
|
||||
|
||||
// you can have --pass-thru:FOO on the command line
|
||||
// or add pass-thru: true in a pipeline configuration step.
|
||||
const passthru =
|
||||
config.GetEntry(node.configScope.last.toString())["pass-thru"] === true ||
|
||||
context.GetEntry(node.configScope.last.toString())["pass-thru"] === true ||
|
||||
values(configView.GetEntry("pass-thru")).any((each) => each === pluginName);
|
||||
const usenull =
|
||||
config.GetEntry(node.configScope.last.toString())["null"] === true ||
|
||||
context.GetEntry(node.configScope.last.toString())["null"] === true ||
|
||||
values(configView.GetEntry("null")).any((each) => each === pluginName);
|
||||
|
||||
const plugin = usenull ? plugins.null : passthru ? plugins.identity : plugins[pluginName];
|
||||
|
@ -374,60 +373,60 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
}
|
||||
|
||||
if (inputScope.skip) {
|
||||
config.Message({ Channel: Channel.Debug, Text: `${nodeName} - SKIPPING` });
|
||||
context.Message({ Channel: Channel.Debug, Text: `${nodeName} - SKIPPING` });
|
||||
return inputScope;
|
||||
}
|
||||
try {
|
||||
let cacheKey: string | undefined;
|
||||
|
||||
if (config.CacheMode) {
|
||||
if (context.config.cachingEnabled) {
|
||||
// generate the key used to store/access cached content
|
||||
const names = await inputScope.Enum();
|
||||
const data = (
|
||||
await Promise.all(names.map((name) => inputScope.ReadStrict(name).then((uri) => md5(uri.ReadData()))))
|
||||
).sort();
|
||||
|
||||
cacheKey = md5([config.configFileFolderUri, nodeName, ...data].join("«"));
|
||||
cacheKey = md5([context.configFileFolderUri, nodeName, ...data].join("«"));
|
||||
}
|
||||
|
||||
// if caching is enabled, see if we can find a scopeResult in the cache first.
|
||||
// key = inputScope names + md5(inputScope content)
|
||||
if (
|
||||
config.CacheMode &&
|
||||
context.config.cachingEnabled &&
|
||||
inputScope.cachable &&
|
||||
config.CacheExclude.indexOf(nodeName) === -1 &&
|
||||
context.config.cacheExclude.indexOf(nodeName) === -1 &&
|
||||
(await isCached(cacheKey))
|
||||
) {
|
||||
// shortcut -- get the outputs directly from the cache.
|
||||
config.Message({
|
||||
context.Message({
|
||||
Channel: times ? Channel.Information : Channel.Debug,
|
||||
Text: `${nodeName} - CACHED inputs = ${(await inputScope.Enum()).length} [0.0 s]`,
|
||||
});
|
||||
|
||||
return await readCache(cacheKey, config.DataStore.getDataSink(node.outputArtifact));
|
||||
return await readCache(cacheKey, context.DataStore.getDataSink(node.outputArtifact));
|
||||
}
|
||||
|
||||
const t1 = process.uptime() * 100;
|
||||
config.Message({
|
||||
context.Message({
|
||||
Channel: times ? Channel.Information : Channel.Debug,
|
||||
Text: `${nodeName} - START inputs = ${(await inputScope.Enum()).length}`,
|
||||
});
|
||||
|
||||
// creates the actual plugin.
|
||||
const scopeResult = await plugin(config, inputScope, config.DataStore.getDataSink(node.outputArtifact));
|
||||
const scopeResult = await plugin(context, inputScope, context.DataStore.getDataSink(node.outputArtifact));
|
||||
const t2 = process.uptime() * 100;
|
||||
|
||||
config.Message({
|
||||
context.Message({
|
||||
Channel: times ? Channel.Information : Channel.Debug,
|
||||
Text: `${nodeName} - END [${Math.floor(t2 - t1) / 100} s]`,
|
||||
});
|
||||
|
||||
// if caching is enabled, let's cache this scopeResult.
|
||||
if (config.CacheMode && cacheKey) {
|
||||
if (context.config.cachingEnabled && cacheKey) {
|
||||
await writeCache(cacheKey, scopeResult);
|
||||
}
|
||||
// if this node wasn't able to load from the cache, then subsequent nodes shall not either
|
||||
if (!inputScope.cachable || config.CacheExclude.indexOf(nodeName) !== -1) {
|
||||
if (!inputScope.cachable || context.config.cacheExclude.indexOf(nodeName) !== -1) {
|
||||
try {
|
||||
scopeResult.cachable = false;
|
||||
} catch {
|
||||
|
@ -437,7 +436,7 @@ export async function runPipeline(configView: ConfigurationView, fileSystem: IFi
|
|||
|
||||
return scopeResult;
|
||||
} catch (e) {
|
||||
if (configView.DebugMode) {
|
||||
if (configView.config.debug) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(`${__filename} - FAILURE ${JSON.stringify(e)}`);
|
||||
}
|
||||
|
|
|
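Several of the renamed cache calls above hinge on how the scheduler builds its cache key. A rough, self-contained sketch of that scheme, assuming the md5 helper in the real code is a plain hex digest like the wrapper below:

import { createHash } from "crypto";

// assumed equivalent of the md5 helper used by the pipeline scheduler
const md5 = (data: string): string => createHash("md5").update(data).digest("hex");

// key = config folder URI + node name + sorted md5 of every input's content
function computeCacheKey(configFileFolderUri: string, nodeName: string, inputContents: string[]): string {
  const data = inputContents.map(md5).sort();
  return md5([configFileFolderUri, nodeName, ...data].join("«"));
}

console.log(computeCacheKey("file:///specs/", "modeler/identity", ['{"a":1}', '{"b":2}']));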
@ -9,8 +9,8 @@ import { ChildProcess, fork } from "child_process";
|
|||
import { RawSourceMap } from "source-map";
|
||||
import { Readable, Writable } from "stream";
|
||||
import { CancellationToken, createMessageConnection } from "vscode-jsonrpc";
|
||||
import { ConfigurationView } from "../../exports";
|
||||
import { Artifact } from "../artifact";
|
||||
import { AutorestContext } from "../configuration";
|
||||
import { EventEmitter } from "../events";
|
||||
import { Exception } from "../exception";
|
||||
import { ArtifactMessage, Channel, Message } from "../message";
|
||||
|
@ -192,7 +192,7 @@ export class AutoRestExtension extends EventEmitter {
|
|||
public async Process(
|
||||
pluginName: string,
|
||||
configuration: (key: string) => any,
|
||||
configurationView: ConfigurationView,
|
||||
context: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
sink: DataSink,
|
||||
onFile: (data: DataHandle) => void,
|
||||
|
@ -205,7 +205,7 @@ export class AutoRestExtension extends EventEmitter {
|
|||
this.apiInitiatorEndpoints[sid] = AutoRestExtension.CreateEndpointFor(
|
||||
pluginName,
|
||||
configuration,
|
||||
configurationView,
|
||||
context,
|
||||
inputScope,
|
||||
sink,
|
||||
onFile,
|
||||
|
@ -228,7 +228,7 @@ export class AutoRestExtension extends EventEmitter {
|
|||
private static CreateEndpointFor(
|
||||
pluginName: string,
|
||||
configuration: (key: string) => any,
|
||||
configurationView: ConfigurationView,
|
||||
context: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
sink: DataSink,
|
||||
onFile: (data: DataHandle) => void,
|
||||
|
@ -303,9 +303,7 @@ export class AutoRestExtension extends EventEmitter {
|
|||
} catch (E) {
|
||||
// try getting the file from the output-folder
|
||||
try {
|
||||
const result = await configurationView.fileSystem.ReadFile(
|
||||
`${configurationView.OutputFolderUri}${filename}`,
|
||||
);
|
||||
const result = await context.fileSystem.ReadFile(`${context.config.outputFolderUri}${filename}`);
|
||||
return result;
|
||||
} catch (E2) {
|
||||
// no file there!
|
||||
|
@ -355,10 +353,10 @@ export class AutoRestExtension extends EventEmitter {
|
|||
}
|
||||
|
||||
// we'd like to be able to ask the host for a file directly (but only if it's supposed to be in the output-folder)
|
||||
const t = configurationView.OutputFolderUri.length;
|
||||
const t = context.config.outputFolderUri.length;
|
||||
return (
|
||||
await configurationView.fileSystem.EnumerateFileUris(
|
||||
EnsureIsFolderUri(`${configurationView.OutputFolderUri}${artifactType || ""}`),
|
||||
await context.fileSystem.EnumerateFileUris(
|
||||
EnsureIsFolderUri(`${context.config.outputFolderUri}${artifactType || ""}`),
|
||||
)
|
||||
).map((each) => each.substr(t));
|
||||
},
|
||||
|
@ -403,7 +401,7 @@ export class AutoRestExtension extends EventEmitter {
|
|||
if (message.Key && message.Text) {
|
||||
const key = [...message.Key];
|
||||
if (key.length > 0) {
|
||||
configurationView.updateConfigurationFile(key[0], message.Text);
|
||||
context.updateConfigurationFile(key[0], message.Text);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { Model, isReference, Refable, Schema } from "@azure-tools/openapi";
|
||||
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
import { values, length, items } from "@azure-tools/linq";
|
||||
|
||||
|
@ -44,7 +44,7 @@ export class AllOfCleaner {
|
|||
}
|
||||
}
|
||||
|
||||
async function allofCleaner(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function allofCleaner(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
import { PipelinePlugin } from "../common";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { DataSource, DataSink, DataHandle, QuickDataSource } from "@azure-tools/datastore";
|
||||
import { execute, cmdlineToArray } from "@azure-tools/codegen";
|
||||
import { FileUriToPath } from "@azure-tools/uri";
|
||||
|
||||
async function command(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
const c = (<any>config.Raw).run;
|
||||
async function command(context: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const c = context.config.raw.run;
|
||||
const commands = Array.isArray(c) ? c : [c];
|
||||
for (const cmd of commands) {
|
||||
const commandline = cmdlineToArray(cmd);
|
||||
await execute(FileUriToPath(config.OutputFolderUri), commandline[0], ...commandline.slice(1));
|
||||
await execute(FileUriToPath(context.config.outputFolderUri), commandline[0], ...commandline.slice(1));
|
||||
}
|
||||
return new QuickDataSource([], input.pipeState);
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { clone, Dictionary } from "@azure-tools/linq";
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
export class ComponentKeyRenamer extends Transformer<any, oai.Model> {
|
||||
|
@ -95,7 +95,7 @@ export class ComponentKeyRenamer extends Transformer<any, oai.Model> {
|
|||
}
|
||||
}
|
||||
|
||||
async function renameComponentsKeys(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function renameComponentsKeys(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
for (const each of inputs) {
|
||||
|
|
|
@ -16,7 +16,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { Dictionary } from "@azure-tools/linq";
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
/**
|
||||
|
@ -39,8 +39,20 @@ type componentType =
|
|||
| "links"
|
||||
| "callbacks";
|
||||
|
||||
interface ComponentTracker {
|
||||
schemas: Set<string>;
|
||||
responses: Set<string>;
|
||||
parameters: Set<string>;
|
||||
examples: Set<string>;
|
||||
requestBodies: Set<string>;
|
||||
headers: Set<string>;
|
||||
securitySchemes: Set<string>;
|
||||
links: Set<string>;
|
||||
callbacks: Set<string>;
|
||||
}
|
||||
|
||||
export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
||||
private visitedComponents = {
|
||||
private visitedComponents: ComponentTracker = {
|
||||
schemas: new Set<string>(),
|
||||
responses: new Set<string>(),
|
||||
parameters: new Set<string>(),
|
||||
|
@ -52,7 +64,7 @@ export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
|||
callbacks: new Set<string>(),
|
||||
};
|
||||
|
||||
private componentsToKeep = {
|
||||
private componentsToKeep: ComponentTracker = {
|
||||
schemas: new Set<string>(),
|
||||
responses: new Set<string>(),
|
||||
parameters: new Set<string>(),
|
||||
|
@ -84,8 +96,8 @@ export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
|||
for (const { children, key: containerType } of visit(this.components)) {
|
||||
for (const { value, key: id } of children) {
|
||||
if (!value["x-ms-metadata"]["x-ms-secondary-file"]) {
|
||||
this.visitedComponents[containerType].add(id);
|
||||
this.componentsToKeep[containerType].add(id);
|
||||
this.visitedComponents[containerType as keyof ComponentTracker].add(id);
|
||||
this.componentsToKeep[containerType as keyof ComponentTracker].add(id);
|
||||
this.crawlObject(value);
|
||||
}
|
||||
}
|
||||
|
@ -112,10 +124,10 @@ export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
|||
const componentRefUid = refParts.pop();
|
||||
const refType = refParts.pop();
|
||||
if (
|
||||
this.componentsToKeep[refType].has(componentRefUid) &&
|
||||
!this.componentsToKeep[containerType].has(currentComponentUid)
|
||||
this.componentsToKeep[refType as keyof ComponentTracker].has(componentRefUid) &&
|
||||
!this.componentsToKeep[containerType as keyof ComponentTracker].has(currentComponentUid)
|
||||
) {
|
||||
this.componentsToKeep[containerType].add(currentComponentUid);
|
||||
this.componentsToKeep[containerType as keyof ComponentTracker].add(currentComponentUid);
|
||||
this.crawlObject(component);
|
||||
entryAdded = true;
|
||||
}
|
||||
|
@ -128,10 +140,10 @@ export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
|||
const componentRefUid = refParts.pop();
|
||||
const refType = refParts.pop();
|
||||
if (
|
||||
this.componentsToKeep[refType].has(componentRefUid) &&
|
||||
!this.componentsToKeep[containerType].has(currentComponentUid)
|
||||
this.componentsToKeep[refType as keyof ComponentTracker].has(componentRefUid) &&
|
||||
!this.componentsToKeep[containerType as keyof ComponentTracker].has(currentComponentUid)
|
||||
) {
|
||||
this.componentsToKeep[containerType].add(currentComponentUid);
|
||||
this.componentsToKeep[containerType as keyof ComponentTracker].add(currentComponentUid);
|
||||
this.crawlObject(component);
|
||||
entryAdded = true;
|
||||
}
|
||||
|
@ -216,7 +228,7 @@ export class ComponentsCleaner extends Transformer<any, oai.Model> {
|
|||
}
|
||||
}
|
||||
|
||||
async function clean(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function clean(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
for (const each of inputs) {
|
||||
|
|
|
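The new ComponentTracker interface and the "as keyof ComponentTracker" casts above exist because the visitor hands back container names as plain strings; a minimal sketch of the typing issue they solve:

// Minimal sketch; this ComponentTracker mirrors a subset of the interface added above.
interface ComponentTracker {
  schemas: Set<string>;
  parameters: Set<string>;
}

const componentsToKeep: ComponentTracker = {
  schemas: new Set<string>(),
  parameters: new Set<string>(),
};

function keep(containerType: string, id: string) {
  // containerType arrives as a plain string, so it must be narrowed to the known keys
  // before it can index the typed tracker.
  componentsToKeep[containerType as keyof ComponentTracker].add(id);
}

keep("schemas", "Pet");
console.log(componentsToKeep.schemas.has("Pet")); // true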
@ -19,7 +19,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { From } from "linq-es2015";
|
||||
import { pushAll } from "../../array";
|
||||
import { ConfigurationView } from "../../autorest-core";
|
||||
import { AutorestContext } from "../../autorest-core";
|
||||
import { IdentitySourceMapping, MergeYamls } from "../../source-map/merging";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
|
@ -47,7 +47,7 @@ function getPropertyValues<T, U>(obj: ObjectWithPath<T>): Array<ObjectWithPath<U
|
|||
}
|
||||
|
||||
async function composeSwaggers(
|
||||
config: ConfigurationView,
|
||||
context: AutorestContext,
|
||||
overrideInfoTitle: any,
|
||||
overrideInfoDescription: any,
|
||||
inputSwaggers: Array<DataHandle>,
|
||||
|
@ -273,7 +273,7 @@ async function composeSwaggers(
|
|||
inputSwaggers[i] = await sink.WriteObject("prepared", swagger, newIdentity, undefined, mapping, [inputSwagger]);
|
||||
}
|
||||
|
||||
let hSwagger = await MergeYamls(config, inputSwaggers, sink, true);
|
||||
let hSwagger = await MergeYamls(context, inputSwaggers, sink, true);
|
||||
|
||||
// override info section
|
||||
const info: any = { title: candidateTitles[0] };
|
||||
|
@ -282,11 +282,11 @@ async function composeSwaggers(
|
|||
}
|
||||
const hInfo = await sink.WriteObject("info.yaml", { info }, ["fix-me-4"]);
|
||||
|
||||
hSwagger = await MergeYamls(config, [hSwagger, hInfo], sink);
|
||||
hSwagger = await MergeYamls(context, [hSwagger, hInfo], sink);
|
||||
|
||||
return hSwagger;
|
||||
} catch (E) {
|
||||
if (config.DebugMode) {
|
||||
if (context.config.debug) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(`${__filename} - FAILURE ${JSON.stringify(E)}`);
|
||||
}
|
||||
|
|
|
@ -5,21 +5,20 @@
|
|||
|
||||
import { DataHandle, DataSink, DataSource, QuickDataSource } from "@azure-tools/datastore";
|
||||
import { Deduplicator } from "@azure-tools/deduplication";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
import { values } from "@azure-tools/linq";
|
||||
import { Channel } from "../../message";
|
||||
|
||||
async function deduplicate(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function deduplicate(context: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
|
||||
const idm = !!config["deduplicate-inline-models"];
|
||||
const idm = !!context.config["deduplicate-inline-models"];
|
||||
|
||||
for (const each of values(inputs).where((input) => input.artifactType !== "profile-filter-log")) {
|
||||
const model = <any>await each.ReadObject();
|
||||
|
||||
/*
|
||||
/*
|
||||
Disabling for now -- not sure if we need to skip this in the simple case anyway.
|
||||
if ([...values(model?.info?.['x-ms-metadata']?.apiVersions).distinct()].length < 2) {
|
||||
config.Message({ Channel: Channel.Verbose, Text: `Skipping Deduplication on single-api-version file ${each.identity}` });
|
||||
|
|
|
@ -1,28 +1,17 @@
|
|||
import {
|
||||
DataHandle,
|
||||
DataSource,
|
||||
Lazy,
|
||||
Normalize,
|
||||
QuickDataSource,
|
||||
createSandbox,
|
||||
Stringify,
|
||||
YAMLNode,
|
||||
} from "@azure-tools/datastore";
|
||||
import { DataHandle, DataSource, Normalize, QuickDataSource, createSandbox, Stringify } from "@azure-tools/datastore";
|
||||
import { ResolveUri } from "@azure-tools/uri";
|
||||
import { Artifact } from "../../../exports";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { Channel } from "../../message";
|
||||
import { IdentitySourceMapping } from "../../source-map/merging";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
const safeEval = createSandbox();
|
||||
|
||||
function isOutputArtifactOrMapRequested(config: ConfigurationView, artifactType: string) {
|
||||
function isOutputArtifactOrMapRequested(config: AutorestContext, artifactType: string) {
|
||||
return config.IsOutputArtifactRequested(artifactType) || config.IsOutputArtifactRequested(artifactType + ".map");
|
||||
}
|
||||
|
||||
async function emitArtifactInternal(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
artifactType: string,
|
||||
uri: string,
|
||||
handle: DataHandle,
|
||||
|
@ -56,7 +45,7 @@ async function emitArtifactInternal(
|
|||
|
||||
let emitCtr = 0;
|
||||
async function emitArtifact(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
uri: string,
|
||||
handle: DataHandle,
|
||||
isObject: boolean,
|
||||
|
@ -116,7 +105,7 @@ async function emitArtifact(
|
|||
}
|
||||
|
||||
export async function emitArtifacts(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
artifactTypeFilter: string | Array<string> | null /* what's set on the emitter */,
|
||||
uriResolver: (key: string) => string,
|
||||
scope: DataSource,
|
||||
|
@ -143,26 +132,29 @@ export async function emitArtifacts(
|
|||
|
||||
/* @internal */
|
||||
export function createArtifactEmitterPlugin(inputOverride?: () => Promise<DataSource>): PipelinePlugin {
|
||||
return async (config, input) => {
|
||||
return async (context, input) => {
|
||||
if (inputOverride) {
|
||||
input = await inputOverride();
|
||||
}
|
||||
|
||||
// clear output-folder if requested
|
||||
if (config.GetEntry(<any>"clear-output-folder")) {
|
||||
config.ClearFolder.Dispatch(config.OutputFolderUri);
|
||||
if (context.GetEntry("clear-output-folder")) {
|
||||
context.ClearFolder.Dispatch(context.config.outputFolderUri);
|
||||
}
|
||||
|
||||
await emitArtifacts(
|
||||
config,
|
||||
config.GetEntry(<any>"input-artifact") || null,
|
||||
context,
|
||||
context.GetEntry("input-artifact") || null,
|
||||
(key) =>
|
||||
ResolveUri(
|
||||
config.OutputFolderUri,
|
||||
safeEval<string>(config.GetEntry(<any>"output-uri-expr") || "$key", { $key: key, $config: config.Raw }),
|
||||
context.config.outputFolderUri,
|
||||
safeEval<string>(context.GetEntry("output-uri-expr") || "$key", {
|
||||
$key: key,
|
||||
$config: context.config.raw,
|
||||
}),
|
||||
),
|
||||
input,
|
||||
config.GetEntry(<any>"is-object"),
|
||||
context.GetEntry("is-object"),
|
||||
);
|
||||
return new QuickDataSource([]);
|
||||
};
|
||||
|
|
|
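The emitter rewrite above keeps the output-uri-expr mechanism; here is a rough sketch of how a key is turned into an output URI. evalExpr is a hypothetical stand-in for the sandboxed safeEval, and plain concatenation stands in for ResolveUri.

// Hypothetical stand-in for the sandboxed evaluation used by the real emitter.
const evalExpr = (expr: string, scope: { $key: string; $config: any }): string =>
  new Function("$key", "$config", `return (${expr});`)(scope.$key, scope.$config);

function resolveOutputUri(outputFolderUri: string, key: string, rawConfig: any, outputUriExpr?: string): string {
  const relative = evalExpr(outputUriExpr || "$key", { $key: key, $config: rawConfig });
  return outputFolderUri + relative; // ResolveUri handles real URI semantics in the actual code
}

console.log(resolveOutputUri("file:///generated/", "code-model-v4.yaml", {}));
// file:///generated/code-model-v4.yaml
console.log(resolveOutputUri("file:///generated/", "client.ts", { namespace: "petstore" }, "$config.namespace + '/' + $key"));
// file:///generated/petstore/client.ts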
@ -8,7 +8,7 @@ import {
|
|||
TransformerViaPointer,
|
||||
QuickDataSource,
|
||||
} from "@azure-tools/datastore";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
import { Dictionary, items } from "@azure-tools/linq";
|
||||
import compareVersions from "compare-versions";
|
||||
|
@ -104,7 +104,7 @@ export class EnumDeduplicator extends TransformerViaPointer {
|
|||
}
|
||||
}
|
||||
|
||||
fixUp(originalRef: string, newRef: string, pointer) {
|
||||
fixUp(originalRef: string, newRef: string, pointer: string) {
|
||||
const fixups = this.refs.get(originalRef);
|
||||
if (fixups) {
|
||||
for (const each of fixups) {
|
||||
|
@ -115,7 +115,7 @@ export class EnumDeduplicator extends TransformerViaPointer {
|
|||
}
|
||||
}
|
||||
|
||||
async function deduplicateEnums(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function deduplicateEnums(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
for (const each of inputs) {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { PipelinePlugin } from "../common";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { DataSource, DataSink, DataHandle, QuickDataSource } from "@azure-tools/datastore";
|
||||
import { values } from "@azure-tools/linq";
|
||||
|
||||
|
@ -12,20 +12,20 @@ export function createNullPlugin(): PipelinePlugin {
|
|||
return async (config, input) => new QuickDataSource([]);
|
||||
}
|
||||
|
||||
async function resetIdentity(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function resetIdentity(context: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
const numberEachFile = inputs.length > 1 && values(inputs).distinct((each) => each.Description);
|
||||
let i = 0;
|
||||
for (const each of inputs) {
|
||||
let name = `${config.name || each.Description}`;
|
||||
let name = `${context.config.name || each.Description}`;
|
||||
if (numberEachFile) {
|
||||
let p = name.lastIndexOf(".");
|
||||
p = p === -1 ? name.length : p;
|
||||
name = `${name.substring(0, p)}-${i++}${name.substring(p)}`;
|
||||
}
|
||||
|
||||
result.push(await sink.WriteData(name, await each.ReadData(), each.identity, config.to));
|
||||
result.push(await sink.WriteData(name, await each.ReadData(), each.identity, context.config.to));
|
||||
}
|
||||
return new QuickDataSource(result, input.pipeState);
|
||||
}
|
||||
|
|
|
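For the identity-reset plugin above, the only non-mechanical change is around naming; the numbering logic it keeps is easy to miss, so here is an isolated sketch of it:

// Isolated copy of the file-numbering logic in resetIdentity above: when several inputs
// would share one output name, an index is spliced in before the extension.
function numberName(name: string, i: number): string {
  let p = name.lastIndexOf(".");
  p = p === -1 ? name.length : p;
  return `${name.substring(0, p)}-${i}${name.substring(p)}`;
}

console.log(numberName("client.yaml", 0)); // client-0.yaml
console.log(numberName("readme", 1));      // readme-1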
@ -5,7 +5,7 @@
|
|||
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
import { ConfigurationView } from "../../autorest-core";
|
||||
import { AutorestContext } from "../../autorest-core";
|
||||
import { Channel, SourceLocation } from "../../message";
|
||||
import { commonmarkHeadingFollowingText, commonmarkSubHeadings, parseCommonmark } from "../../parsing/literate";
|
||||
import { parse as ParseLiterateYaml } from "../../parsing/literate-yaml";
|
||||
|
@ -30,7 +30,7 @@ import { crawlReferences } from "./ref-crawling";
|
|||
* If a JSON file is provided, it checks that the syntax is correct.
|
||||
* If the syntax is incorrect, it reports an error message.
|
||||
*/
|
||||
async function checkSyntaxFromData(fileUri: string, handle: DataHandle, configView: ConfigurationView): Promise<void> {
|
||||
async function checkSyntaxFromData(fileUri: string, handle: DataHandle, configView: AutorestContext): Promise<void> {
|
||||
if (fileUri.toLowerCase().endsWith(".json")) {
|
||||
const error = StrictJsonSyntaxCheck(await handle.ReadData());
|
||||
if (error) {
|
||||
|
@ -53,7 +53,7 @@ function isOpenAPI3Spec(specObject: OpenAPI3Spec): boolean {
|
|||
}
|
||||
|
||||
export async function LoadLiterateSwagger(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
inputFileUri: string,
|
||||
sink: DataSink,
|
||||
|
@ -75,7 +75,7 @@ export async function LoadLiterateSwagger(
|
|||
}
|
||||
|
||||
export async function LoadLiterateOpenAPI(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
inputFileUri: string,
|
||||
sink: DataSink,
|
||||
|
@ -96,7 +96,7 @@ export async function LoadLiterateOpenAPI(
|
|||
}
|
||||
|
||||
export async function LoadLiterateSwaggers(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
inputFileUris: Array<string>,
|
||||
sink: DataSink,
|
||||
|
@ -114,7 +114,7 @@ export async function LoadLiterateSwaggers(
|
|||
}
|
||||
|
||||
export async function LoadLiterateOpenAPIs(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
inputFileUris: Array<string>,
|
||||
sink: DataSink,
|
||||
|
@ -140,7 +140,7 @@ interface OpenAPI3Spec {
|
|||
/* @internal */
|
||||
export function createSwaggerLoaderPlugin(): PipelinePlugin {
|
||||
return async (config, input, sink) => {
|
||||
const inputs = config.InputFileUris;
|
||||
const inputs = config.config.inputFileUris;
|
||||
const swaggers = await LoadLiterateSwaggers(config, input, inputs, sink);
|
||||
|
||||
const foundAllFiles = swaggers.length !== inputs.length;
|
||||
|
@ -156,7 +156,7 @@ export function createSwaggerLoaderPlugin(): PipelinePlugin {
|
|||
/* @internal */
|
||||
export function createOpenApiLoaderPlugin(): PipelinePlugin {
|
||||
return async (config, input, sink) => {
|
||||
const inputs = config.InputFileUris;
|
||||
const inputs = config.config.inputFileUris;
|
||||
const openapis = await LoadLiterateOpenAPIs(config, input, inputs, sink);
|
||||
let result: Array<DataHandle> = [];
|
||||
if (openapis.length === inputs.length) {
|
||||
|
|
|
@ -12,7 +12,7 @@ import {
|
|||
import { clone, Dictionary, values, visitor } from "@azure-tools/linq";
|
||||
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
/**
|
||||
|
@ -422,7 +422,7 @@ function cleanRefs(instance: AnyObject): AnyObject {
|
|||
return instance;
|
||||
}
|
||||
|
||||
async function merge(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function merge(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map((x) => input.ReadStrict(x)));
|
||||
if (inputs.length === 1) {
|
||||
const model = await inputs[0].ReadObject<any>();
|
||||
|
|
|
@ -13,7 +13,7 @@ import { areSimilar } from "@azure-tools/object-comparison";
|
|||
import { PipelinePlugin } from "../common";
|
||||
import { maximum, toSemver } from "@azure-tools/codegen";
|
||||
import compareVersions from "compare-versions";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
|
||||
try {
|
||||
require("source-map-support").install();
|
||||
|
@ -277,11 +277,11 @@ export class NewComposer extends Transformer<AnyObject, AnyObject> {
|
|||
|
||||
protected cloneInto<TParent extends object>(target: ProxyObject<TParent>, originalNodes: Iterable<Node>) {
|
||||
for (const { key, value, pointer } of originalNodes) {
|
||||
if (target[key] === undefined) {
|
||||
if (target[key as keyof TParent] === undefined) {
|
||||
// the value isn't in the target. We can take it from the source
|
||||
this.clone(<AnyObject>target, key, pointer, value);
|
||||
} else {
|
||||
if (!areSimilar(value, target[key], "x-ms-metadata", "description", "summary")) {
|
||||
if (!areSimilar(value, target[key as keyof TParent], "x-ms-metadata", "description", "summary")) {
|
||||
throw new Error(`Incompatible models conflicting: ${pointer}`);
|
||||
}
|
||||
}
|
||||
|
@ -478,7 +478,7 @@ export class NewComposer extends Transformer<AnyObject, AnyObject> {
|
|||
}
|
||||
}
|
||||
|
||||
async function compose(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function compose(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
|
||||
// compose-a-vous!
|
||||
|
|
|
@ -20,7 +20,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { Dictionary, items, values } from "@azure-tools/linq";
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
type componentType =
|
||||
|
@ -54,12 +54,24 @@ interface OperationData {
|
|||
path: string;
|
||||
}
|
||||
|
||||
interface ComponentTracker {
|
||||
schemas: Set<string>;
|
||||
responses: Set<string>;
|
||||
parameters: Set<string>;
|
||||
examples: Set<string>;
|
||||
requestBodies: Set<string>;
|
||||
headers: Set<string>;
|
||||
securitySchemes: Set<string>;
|
||||
links: Set<string>;
|
||||
callbacks: Set<string>;
|
||||
}
|
||||
|
||||
export class ProfileFilter extends Transformer<any, oai.Model> {
|
||||
filterTargets: Array<{ apiVersion: string; profile: string; pathRegex: RegExp; weight: number }> = [];
|
||||
|
||||
// sets containing the UIDs of components already visited.
|
||||
// This is used to prevent circular references.
|
||||
private visitedComponents = {
|
||||
private visitedComponents: ComponentTracker = {
|
||||
schemas: new Set<string>(),
|
||||
responses: new Set<string>(),
|
||||
parameters: new Set<string>(),
|
||||
|
@ -71,7 +83,7 @@ export class ProfileFilter extends Transformer<any, oai.Model> {
|
|||
callbacks: new Set<string>(),
|
||||
};
|
||||
|
||||
private componentsToKeep = {
|
||||
private componentsToKeep: ComponentTracker = {
|
||||
schemas: new Set<string>(),
|
||||
responses: new Set<string>(),
|
||||
parameters: new Set<string>(),
|
||||
|
@ -414,7 +426,7 @@ export class ProfileFilter extends Transformer<any, oai.Model> {
|
|||
|
||||
visitComponent<T>(type: string, container: ProxyObject<Dictionary<T>>, nodes: Iterable<Node>) {
|
||||
for (const { key, value, pointer } of nodes) {
|
||||
if (this.componentsToKeep[type].has(key)) {
|
||||
if (this.componentsToKeep[type as keyof ComponentTracker].has(key)) {
|
||||
this.clone(container, key, pointer, value);
|
||||
}
|
||||
}
|
||||
|
@ -439,7 +451,7 @@ export class ProfileFilter extends Transformer<any, oai.Model> {
|
|||
}
|
||||
}
|
||||
|
||||
async function filter(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function filter(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
for (const each of inputs) {
|
||||
|
@ -501,7 +513,9 @@ function getFilesUsed(nodes: Iterable<Node>) {
|
|||
switch (field.key) {
|
||||
case "paths":
|
||||
for (const path of field.children) {
|
||||
path.value["x-ms-metadata"].originalLocations.map((x) => filesUsed.add(x.replace(/(.*)#\/paths.*/g, "$1")));
|
||||
path.value["x-ms-metadata"].originalLocations.map((x: string) =>
|
||||
filesUsed.add(x.replace(/(.*)#\/paths.*/g, "$1")),
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
|
@ -518,7 +532,7 @@ function getFilesUsed(nodes: Iterable<Node>) {
|
|||
case "callbacks":
|
||||
case "securitySchemes":
|
||||
for (const component of collection.children) {
|
||||
component.value["x-ms-metadata"].originalLocations.map((x) =>
|
||||
component.value["x-ms-metadata"].originalLocations.map((x: any) =>
|
||||
filesUsed.add(x.replace(/(.*)#\/components.*/g, "$1")),
|
||||
);
|
||||
}
|
||||
|
|
|
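The typed callbacks added to getFilesUsed above strip the JSON-pointer suffix from each originalLocations entry; a quick illustration, with hypothetical location strings in the "<file>#/paths/..." shape the regex expects:

// Hypothetical sample data; only the "<file>#/<pointer>" shape matters here.
const originalLocations = [
  "file:///specs/compute.json#/paths/~1virtualMachines/get",
  "file:///specs/network.json#/paths/~1virtualNetworks/get",
];

const filesUsed = new Set<string>();
originalLocations.map((x: string) => filesUsed.add(x.replace(/(.*)#\/paths.*/g, "$1")));

console.log([...filesUsed]);
// [ "file:///specs/compute.json", "file:///specs/network.json" ]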
@ -9,12 +9,12 @@ import {
|
|||
QuickDataSource,
|
||||
} from "@azure-tools/datastore";
|
||||
import { items } from "@azure-tools/linq";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
import { format } from "path";
|
||||
import { Channel } from "../../message";
|
||||
|
||||
async function quickCheck(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function quickCheck(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
for (const each of inputs) {
|
||||
const oai = await each.ReadObject<AnyObject>();
|
||||
|
|
|
@ -10,13 +10,13 @@ import {
|
|||
visit,
|
||||
} from "@azure-tools/datastore";
|
||||
import { ResolveUri } from "@azure-tools/uri";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { Channel } from "../../message";
|
||||
import { values, items, length } from "@azure-tools/linq";
|
||||
/* eslint-disable @typescript-eslint/no-use-before-define */
|
||||
|
||||
export async function crawlReferences(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
inputScope: DataSource,
|
||||
filesToCrawl: Array<DataHandle>,
|
||||
sink: DataSink,
|
||||
|
@ -99,7 +99,7 @@ class RefProcessor extends Transformer<any, any> {
|
|||
}
|
||||
|
||||
async processXMSExamples(targetParent: AnyObject, examples: AnyObject) {
|
||||
const xmsExamples = {};
|
||||
const xmsExamples: any = {};
|
||||
|
||||
for (const { key, value } of items(examples)) {
|
||||
if (value.$ref) {
|
||||
|
|
|
@ -12,7 +12,7 @@ import {
|
|||
import { clone, Dictionary, values } from "@azure-tools/linq";
|
||||
import { areSimilar } from "@azure-tools/object-comparison";
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
import { toSemver, maximum, gt, lt } from "@azure-tools/codegen";
|
||||
import { Channel } from "../../message";
|
||||
|
@ -62,7 +62,7 @@ export class SubsetSchemaDeduplicator extends Transformer<any, oai.Model> {
|
|||
|
||||
visitSchemas<T>(container: ProxyObject<Dictionary<T>>, originalNodes: () => Iterable<Node>) {
|
||||
const xMsMetadata = "x-ms-metadata";
|
||||
const updatedSchemas = {};
|
||||
const updatedSchemas: any = {};
|
||||
|
||||
// get all the schemas and associate them with their uid
|
||||
// this will allow us to place the value in the right place at the end
|
||||
|
@ -115,7 +115,9 @@ export class SubsetSchemaDeduplicator extends Transformer<any, oai.Model> {
|
|||
// gs: added -- ensure that properties left beg
|
||||
if (currentSchema.value.required && supersetEquivSchema.properties) {
|
||||
const sesNames = Object.getOwnPropertyNames(supersetEquivSchema.properties);
|
||||
supersetEquivSchema.required = currentSchema.value.required.filter((each) => sesNames.indexOf(each) > -1);
|
||||
supersetEquivSchema.required = currentSchema.value.required.filter(
|
||||
(each: any) => sesNames.indexOf(each) > -1,
|
||||
);
|
||||
}
|
||||
|
||||
// replace with equivalent schema and put back metadata.
|
||||
|
@ -344,7 +346,7 @@ export interface SubsetCheckResult {
|
|||
};
|
||||
}
|
||||
|
||||
async function deduplicateSubsetSchemas(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function deduplicateSubsetSchemas(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
for (const each of inputs) {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { createSandbox, JsonPath } from "@azure-tools/datastore";
|
||||
import { ConfigurationView } from "../../../autorest-core";
|
||||
import { AutorestContext } from "../../../autorest-core";
|
||||
import { Channel } from "../../../message";
|
||||
|
||||
const safeEval = createSandbox();
|
||||
|
@ -8,7 +8,7 @@ export interface TransformOptions {
|
|||
/**
|
||||
* Current configuration.
|
||||
*/
|
||||
config: ConfigurationView;
|
||||
config: AutorestContext;
|
||||
|
||||
/**
|
||||
* Value to transform.
|
||||
|
@ -86,7 +86,7 @@ interface Lib {
|
|||
debug: (message: string) => void;
|
||||
verbose: (message: string) => void;
|
||||
log: (message: string) => void;
|
||||
config: ConfigurationView;
|
||||
config: AutorestContext;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -5,17 +5,17 @@
|
|||
|
||||
import { DataHandle, DataSink, nodes } from "@azure-tools/datastore";
|
||||
import { YieldCPU } from "@azure-tools/tasks";
|
||||
import { ConfigurationView } from "../../../autorest-core";
|
||||
import { ResolvedDirective } from "../../../configuration";
|
||||
import { AutorestContext } from "../../../autorest-core";
|
||||
import { Channel, Message, SourceLocation } from "../../../message";
|
||||
import { manipulateObject } from "./object-manipulator";
|
||||
import { values } from "@azure-tools/linq";
|
||||
import { evalDirectiveTest, evalDirectiveTransform } from "./eval";
|
||||
import { ResolvedDirective } from "@autorest/configuration";
|
||||
|
||||
export class Manipulator {
|
||||
private transformations: Array<ResolvedDirective>;
|
||||
|
||||
public constructor(private config: ConfigurationView) {
|
||||
public constructor(private config: AutorestContext) {
|
||||
this.transformations = config.resolveDirectives(
|
||||
(directive) => directive.from.length > 0 && directive.transform.length > 0 && directive.where.length > 0,
|
||||
);
|
||||
|
|
|
@ -19,7 +19,7 @@ import {
|
|||
ToAst,
|
||||
YAMLNode,
|
||||
} from "@azure-tools/datastore";
|
||||
import { ConfigurationView } from "../../../autorest-core";
|
||||
import { AutorestContext } from "../../../autorest-core";
|
||||
import { Channel } from "../../../message";
|
||||
import { IdentitySourceMapping } from "../../../source-map/merging";
|
||||
|
||||
|
@ -28,7 +28,7 @@ export async function manipulateObject(
|
|||
target: DataSink,
|
||||
whereJsonQuery: string,
|
||||
transformer: (doc: any, obj: any, path: JsonPath) => any, // transforming to `undefined` results in removal
|
||||
config?: ConfigurationView,
|
||||
config?: AutorestContext,
|
||||
transformationString?: string,
|
||||
mappingInfo?: {
|
||||
transformerSourceHandle: DataHandle;
|
||||
|
@ -115,7 +115,7 @@ export async function manipulateObject(
|
|||
};
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
// try to be smart and assume that nodes existing in both old and new AST have a relationship
|
||||
mapping.push(
|
||||
...From(Descendants(newAst))
|
||||
|
|
|
@ -171,7 +171,7 @@ export function createTextTransformerPlugin(): PipelinePlugin {
|
|||
/* @internal */
|
||||
export function createTransformerPlugin(): PipelinePlugin {
|
||||
return createPerFilePlugin(async (config) => {
|
||||
const isObject = config.GetEntry(<any>"is-object") === false ? false : true;
|
||||
const isObject = config.GetEntry("is-object") === false ? false : true;
|
||||
const manipulator = new Manipulator(config);
|
||||
return async (fileIn, sink) => {
|
||||
const fileOut = await manipulator.process(fileIn, sink, isObject, fileIn.Description);
|
||||
|
@ -183,11 +183,11 @@ export function createTransformerPlugin(): PipelinePlugin {
|
|||
/* @internal */
|
||||
export function createImmediateTransformerPlugin(): PipelinePlugin {
|
||||
return async (config, input, sink) => {
|
||||
const isObject = config.GetEntry(<any>"is-object") === false ? false : true;
|
||||
const isObject = config.GetEntry("is-object") === false ? false : true;
|
||||
const files = await input.Enum(); // first all the immediate-configs, then a single swagger-document
|
||||
const scopes = await Promise.all(files.slice(0, files.length - 1).map((f) => input.ReadStrict(f)));
|
||||
const manipulator = new Manipulator(
|
||||
config.GetNestedConfigurationImmediate(...(await Promise.all(scopes.map((s) => s.ReadObject<any>())))),
|
||||
config.extendWith(...(await Promise.all(scopes.map((s) => s.ReadObject<any>())))),
|
||||
);
|
||||
const file = files[files.length - 1];
|
||||
const fileIn = await input.ReadStrict(file);
|
||||
|
|
|
@ -11,7 +11,7 @@ import {
|
|||
JsonPath,
|
||||
Source,
|
||||
} from "@azure-tools/datastore";
|
||||
import { ConfigurationView } from "../../../configuration";
|
||||
import { AutorestContext } from "../../../configuration";
|
||||
import { PipelinePlugin } from "../../common";
|
||||
import { values, length } from "@azure-tools/linq";
|
||||
import { createHash } from "crypto";
|
||||
|
@ -783,7 +783,7 @@ export class OAI3Shaker extends Transformer<AnyObject, AnyObject> {
|
|||
}
|
||||
}
|
||||
|
||||
async function shakeTree(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function shakeTree(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
const isSimpleTreeShake = !!config.GetEntry("simple-tree-shake");
|
||||
|
|
|
@ -11,7 +11,7 @@ import {
|
|||
} from "@azure-tools/datastore";
|
||||
import { clone, Dictionary } from "@azure-tools/linq";
|
||||
import * as oai from "@azure-tools/openapi";
|
||||
import { ConfigurationView } from "../../configuration";
|
||||
import { AutorestContext } from "../../configuration";
|
||||
import { PipelinePlugin } from "../common";
|
||||
|
||||
export class ApiVersionParameterHandler extends Transformer<any, oai.Model> {
|
||||
|
@ -160,7 +160,7 @@ export class ApiVersionParameterHandler extends Transformer<any, oai.Model> {
|
|||
}
|
||||
}
|
||||
|
||||
async function handleApiVersionParameter(config: ConfigurationView, input: DataSource, sink: DataSink) {
|
||||
async function handleApiVersionParameter(config: AutorestContext, input: DataSource, sink: DataSink) {
|
||||
const inputs = await Promise.all((await input.Enum()).map(async (x) => input.ReadStrict(x)));
|
||||
const result: Array<DataHandle> = [];
|
||||
if (config.GetEntry("azure-arm")) {
|
||||
|
|
|
@ -8,9 +8,7 @@ import SchemaValidator from "z-schema";
|
|||
import { OperationAbortedException } from "../exception";
|
||||
import { Channel } from "../message";
|
||||
import { createPerFilePlugin, PipelinePlugin } from "./common";
|
||||
import * as path from "path";
|
||||
import { AppRoot } from "../constants";
|
||||
import { ConfigurationView } from "../configuration";
|
||||
import { AutorestContext } from "../configuration";
|
||||
|
||||
export function createSwaggerSchemaValidatorPlugin(): PipelinePlugin {
|
||||
const validator = new SchemaValidator({ breakOnFirstError: false });
|
||||
|
@ -47,18 +45,18 @@ export function createOpenApiSchemaValidatorPlugin(): PipelinePlugin {
|
|||
const validator = new SchemaValidator({ breakOnFirstError: false });
|
||||
|
||||
const extendedOpenApiSchema = require(`@autorest/schemas/openapi3-schema.json`);
|
||||
return createPerFilePlugin(async (config) => async (fileIn, sink) => {
|
||||
return createPerFilePlugin(async (context) => async (fileIn, sink) => {
|
||||
const obj = await fileIn.ReadObject<any>();
|
||||
const isSecondary = !!obj["x-ms-secondary-file"];
|
||||
const markErrorAsWarnings = config["mark-oai3-errors-as-warnings"];
|
||||
const markErrorAsWarnings = context.config["mark-oai3-errors-as-warnings"];
|
||||
const errors = await validateSchema(obj, extendedOpenApiSchema, validator);
|
||||
if (errors !== null) {
|
||||
for (const error of errors) {
|
||||
const level = markErrorAsWarnings || isSecondary ? "warning" : "error";
|
||||
logValidationError(config, fileIn, error, "schema-validator-openapi", level);
|
||||
logValidationError(context, fileIn, error, "schema-validator-openapi", level);
|
||||
}
|
||||
if (!isSecondary) {
|
||||
config.Message({
|
||||
context.Message({
|
||||
Channel: Channel.Error,
|
||||
Plugin: "schema-validator-openapi",
|
||||
Text: [
|
||||
|
@ -99,7 +97,7 @@ const validateSchema = (
|
|||
};
|
||||
|
||||
const logValidationError = (
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
fileIn: DataHandle,
|
||||
error: ValidationError,
|
||||
pluginName: string,
|
||||
|
|
|
@ -3,16 +3,16 @@
|
|||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { ResolvedDirective } from "@autorest/configuration";
|
||||
import { JsonPath, matches } from "@azure-tools/datastore";
|
||||
import { From } from "linq-es2015";
|
||||
import { ConfigurationView } from "../autorest-core";
|
||||
import { ResolvedDirective } from "../configuration";
|
||||
import { AutorestContext } from "../autorest-core";
|
||||
import { Message } from "../message";
|
||||
|
||||
export class Suppressor {
|
||||
private suppressions: Array<ResolvedDirective>;
|
||||
|
||||
public constructor(private config: ConfigurationView) {
|
||||
public constructor(private config: AutorestContext) {
|
||||
this.suppressions = config.resolveDirectives((x) => x.suppress.length > 0);
|
||||
}
|
||||
|
||||
|
|
|
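Both the Manipulator and the Suppressor above now pull ResolvedDirective from @autorest/configuration and simply filter on which fields are populated; a small sketch of that split, modeling only the fields this diff touches:

// Sketch only: the real ResolvedDirective in @autorest/configuration has more fields.
interface DirectiveSketch {
  from: string[];
  where: string[];
  transform: string[];
  suppress: string[];
}

const directives: DirectiveSketch[] = [
  { from: ["openapi-document"], where: ["$.info"], transform: ["$.title = 'renamed'"], suppress: [] },
  { from: [], where: [], transform: [], suppress: ["R4000"] },
];

// Manipulator keeps directives that say where to look and what to change...
const transformations = directives.filter((d) => d.from.length > 0 && d.transform.length > 0 && d.where.length > 0);
// ...while Suppressor keeps the ones that suppress messages.
const suppressions = directives.filter((d) => d.suppress.length > 0);

console.log(transformations.length, suppressions.length); // 1 1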
@ -19,7 +19,7 @@ import {
|
|||
Parse,
|
||||
} from "@azure-tools/datastore";
|
||||
import { pushAll } from "../array";
|
||||
import { ConfigurationView } from "../configuration";
|
||||
import { AutorestContext } from "../configuration";
|
||||
import { Channel } from "../message";
|
||||
import { isArray } from "util";
|
||||
|
||||
|
@ -274,7 +274,7 @@ export function IdentitySourceMapping(sourceYamlFileName: string, sourceYamlAst:
|
|||
}
|
||||
|
||||
export async function MergeYamls(
|
||||
config: ConfigurationView,
|
||||
config: AutorestContext,
|
||||
yamlInputHandles: Array<DataHandle>,
|
||||
sink: DataSink,
|
||||
verifyOAI2 = false,
|
||||
|
|
|
@@ -1,4 +1,3 @@
import assert from "assert";
import { MemoryFileSystem } from "@azure-tools/datastore";
import * as AutoRest from "../src/lib/autorest-core";

@@ -49,42 +48,42 @@ csharp:
    );

    const autorest = new AutoRest.AutoRest(f, MemoryFileSystem.DefaultVirtualRootUri + "readme.md");
    let cfg = await autorest.view;
    const context = await autorest.view;
    const cfg = context.config;

    // output folder should be 'foo'
    assert.equal(cfg["output-folder"], "foo");
    expect(cfg.raw["output-folder"]).toEqual("foo");

    // sample-other should get resolved to the value of sample-value
    assert.equal(cfg["sample-other"], "one");
    expect(cfg.raw["sample-other"]).toEqual("one");

    // verify that the items object that uses a macro works too
    assert.equal(cfg["items"][3], "one/two");
    expect(cfg.raw["items"][3]).toEqual("one/two");

    for (const each of cfg.GetNestedConfiguration("csharp")) {
    for (const each of context.getNestedConfiguration("csharp")) {
      // verify the output folder is relative
      assert.equal(each.GetEntry("output-folder"), "foo/csharp");
      expect(each.GetEntry("output-folder")).toEqual("foo/csharp");

      // verify that the items object that uses a macro works too
      // assert.equal((<any>(each.Raw))['items'][3], "two/two");
      // expect((<any>(each.Raw))['items'][3]).toEqual( "two/two");

      // now, this got resolved a lot earlier.
      // not sure if we need it the other way or not.
      assert.equal(each["items"][3], "one/two");
      expect(each.config["items"][3]).toEqual("one/two");
    }

    // override the output-folder from the cmdline
    autorest.AddConfiguration({ "output-folder": "OUTPUT" });
    cfg = await autorest.view;
    assert.equal(cfg["output-folder"], "OUTPUT");
    const updatedContext = await autorest.view;
    expect(updatedContext.config.raw["output-folder"]).toEqual("OUTPUT");

    for (const each of cfg.GetNestedConfiguration("csharp")) {
      assert.equal(each["output-folder"], "OUTPUT/csharp");
    for (const each of updatedContext.getNestedConfiguration("csharp")) {
      expect(each.config.raw["output-folder"]).toEqual("OUTPUT/csharp");
    }
  });

  it("Test Guards", async () => {
    // test out subscribe

    const f = new MemoryFileSystem(
      new Map<string, string>([
        [

@@ -134,13 +133,13 @@ value:

    const autorest = new AutoRest.AutoRest(f, MemoryFileSystem.DefaultVirtualRootUri + "readme.md");
    autorest.AddConfiguration({ foo: true });
    let cfg = await autorest.view;
    let context = await autorest.view;

    // output folder should be 'foo'
    assert.deepEqual(cfg["value"], ["not_bar", "foo_and_not_bar", "foo"]);
    expect(context.config.raw["value"]).toEqual(["not_bar", "foo_and_not_bar", "foo"]);

    autorest.AddConfiguration({ bar: true });
    cfg = await autorest.view;
    assert.deepEqual(cfg["value"], ["bar", "foo_and_bar", "foo"]);
    context = await autorest.view;
    expect(context.config.raw["value"]).toEqual(["bar", "foo_and_bar", "foo"]);
  });
});

@@ -50,8 +50,8 @@ describe("EndToEnd", () => {
    );
    // PumpMessagesToConsole(autoRest);

    const config = await autoRest.view;
    assert.strictEqual(config["shouldwork"], true);
    const context = await autoRest.view;
    assert.strictEqual(context.config["shouldwork"], true);
  });

  // todo: skipping because testing is broken?

@@ -71,7 +71,7 @@ describe("EndToEnd", () => {
    });

    const config = await autoRest.view;
    assert.strictEqual(config.InputFileUris.length, 1);
    assert.strictEqual(config.config.inputFileUris.length, 1);

    const messages: Array<Message> = [];

@@ -11,7 +11,7 @@ describe("OpenAPI3Loading", () => {
    const config = await autoRest.view;
    const dataStore = config.DataStore;

    const inputFilesUris = [];
    const inputFilesUris: string[] = [];

    const OpenAPIFilesLoaded = await LoadLiterateOpenAPIs(
      config,

@@ -12,7 +12,7 @@ describe("SwaggerLoading", () => {
    const config = await autoRest.view;
    const dataStore = config.DataStore;

    const inputFilesUris = [];
    const inputFilesUris: string[] = [];

    const swaggerFilesLoaded = await LoadLiterateSwaggers(
      config,

@@ -1,88 +0,0 @@
/* eslint-disable no-console */
import * as cp from "child_process";
import * as rpc from "vscode-jsonrpc";

async function connect() {
  const childProcess = cp.spawn("dotnet", [
    `${__dirname}/../../../core/AutoRest/bin/netcoreapp1.0/AutoRest.dll`,
    "--server",
  ]);

  // Use stdin and stdout for communication:
  const connection = rpc.createMessageConnection(
    new rpc.StreamMessageReader(childProcess.stdout),
    new rpc.StreamMessageWriter(childProcess.stdin),
    console,
  );

  // host interface
  connection.onNotification(
    new rpc.NotificationType4<string, string, string, any, void>("WriteFile"),
    (sessionId: string, filename: string, content: string, sourcemap: any) => {
      console.log(`Saving File ${sessionId}, ${filename}`);
    },
  );

  connection.onNotification(
    new rpc.NotificationType3<string, any, any, void>("Message"),
    (sessionId: string, details: any, sourcemap: any) => {
      console.log(`You have posted message ${sessionId}, ${details}`);
    },
  );

  connection.onRequest(
    new rpc.RequestType2<string, string, string, void, void>("ReadFile"),
    (sessionId: string, filename: string) => {
      return `You asked for the file ${filename} in the session ${sessionId}`;
    },
  );

  connection.onRequest(
    new rpc.RequestType2<string, string, string, void, void>("GetValue"),
    (sessionId: string, key: string) => {
      return `You asked for the value ${key} in the session ${sessionId}`;
    },
  );

  connection.onRequest(
    new rpc.RequestType2<string, string | undefined, Array<string>, void, void>("ListInputs"),
    (sessionId: string) => {
      return ["a.txt", "b.txt"];
    },
  );

  // extension interface
  const EnumeratePlugins = new rpc.RequestType0<Array<string>, void, void>("GetPluginNames");
  const Process = (plugin: string, session: string) =>
    connection.sendRequest(new rpc.RequestType2<string, string, boolean, void, void>("Process"), plugin, session);
  const Shutdown = () => connection.sendNotification(new rpc.NotificationType0<void>("Shutdown"));

  childProcess.stderr.pipe(process.stdout);
  connection.listen();

  console.log("before enumerate");
  const values = await connection.sendRequest(EnumeratePlugins);
  for (const each of values) {
    console.log(each);
  }
  console.log("after enumerate");

  console.log("calling process");
  const result = await Process("Modeler", "session1");
  console.log(`done process: ${result} `);

  Shutdown();

  // wait for shutdown!
  await new Promise<void>((resolve) => {
    setTimeout(() => {
      resolve();
    }, 200);
  });
}

describe("TestConnectivity", () => {
  xit("E2E", async () => {
    await connect();
  });
});

@@ -2,8 +2,7 @@
  "extends": "../../../tsconfig.json",
  "compilerOptions": {
    "outDir": "dist",
    "noImplicitAny": false,
    "types": ["node", "jest"],
    "types": ["node", "jest"]
  },
  "include": ["src/**/*.ts", "test/**/*.ts"],
  "include": ["src/**/*.ts", "test/**/*.ts"]
}

@@ -0,0 +1,3 @@
parser: "@typescript-eslint/parser"
extends:
  - "../../../.default-eslintrc.yaml"

@@ -0,0 +1,10 @@
// @ts-check

const defaultConfig = require("../../../jest.default.config");

const config = {
  ...defaultConfig,
  testMatch: ["<rootDir>/src/**/*.test.ts", "<rootDir>/test/**/*.test.ts"],
};

module.exports = config;

@@ -0,0 +1,39 @@
{
  "name": "@autorest/configuration",
  "version": "1.0.0",
  "description": "Autorest configuration",
  "main": "dist/index.js",
  "typings": "dist/index.d.ts",
  "scripts": {
    "build": "tsc -p ./tsconfig.build.json",
    "watch": "tsc -p ./tsconfig.build.json --watch",
    "lint:fix": "eslint ./src --fix --ext .ts",
    "lint": "eslint ./src --ext .ts --max-warnings=0",
    "prepare": "npm run build",
    "test": "jest --watchAll --coverage=false",
    "test:ci": "jest --ci",
    "clean": "rimraf ./dist"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/Azure/autorest.git"
  },
  "author": "Microsoft Corporation",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/Azure/autorest/issues"
  },
  "homepage": "https://github.com/Azure/autorest#readme",
  "devDependencies": {
    "@types/jest": "^26.0.20",
    "@types/node": "~14.14.20",
    "@typescript-eslint/eslint-plugin": "^4.12.0",
    "@typescript-eslint/parser": "^4.12.0",
    "eslint-plugin-prettier": "~3.2.0",
    "eslint-plugin-unicorn": "~27.0.0",
    "eslint": "^7.17.0",
    "jest": "^26.6.3",
    "rimraf": "^3.0.2",
    "typescript": "~4.1.3"
  }
}

@@ -0,0 +1,50 @@
import { AutorestRawConfiguration } from "./autorest-raw-configuration";

// TODO-TIM don't extend
export interface AutorestConfiguration extends AutorestRawConfiguration {
  /**
   * Raw configuration that was used to build this config
   */
  raw: AutorestRawConfiguration;

  configFileFolderUri: string;

  inputFileUris: string[];

  /**
   * Path to the output folder.
   */
  outputFolderUri: string;

  // TODO-TIM check type?
  configurationFiles: { [key: string]: any };

  /**
   * If help was requested.
   */
  help: boolean;

  /**
   * If logging should be verbose.
   */
  verbose: boolean;

  /**
   * If running in debug mode.
   */
  debug: boolean;

  /**
   * If running in caching mode.
   */
  cachingEnabled: boolean;

  /**
   * list of files to exclude from caching.
   */
  cacheExclude: string[];

  // TODO-TIM check those?
  name?: string;
  to?: string;
}

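For orientation, here is a rough sketch of what a fully resolved configuration object could look like once the fields above are populated. The import assumes the type is exported from the new @autorest/configuration package, and every concrete value is hypothetical, meant only to illustrate the shape of the interface.

import { AutorestConfiguration } from "@autorest/configuration";

// Hypothetical example values; only the shape matches the interface above.
const exampleConfig: AutorestConfiguration = {
  raw: { "output-folder": "generated" },          // the raw user input this config was built from
  configFileFolderUri: "file:///specs/",          // folder containing the readme.md that was loaded
  inputFileUris: ["file:///specs/openapi.json"],
  outputFolderUri: "file:///specs/generated/",
  configurationFiles: {},                         // keyed map of loaded configuration files
  help: false,
  verbose: false,
  debug: false,
  cachingEnabled: false,
  cacheExclude: [],
};
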
@@ -1,12 +1,10 @@
import { evaluateGuard } from "../parsing/literate-yaml";
import { MergeOverwriteOrAppend } from "../source-map/merging";
import { Directive } from "./directive";

/**
 * Represents a raw configuration provided by the user,
 * i.e. the mapping of values passed via a config block, cli arguments, etc.
 */
export interface AutoRestRawConfiguration {
export interface AutorestRawConfiguration {
  "__info"?: string | null;
  "__parents"?: any | undefined;
  "allow-no-input"?: boolean;

@@ -17,10 +15,14 @@ export interface AutoRestRawConfiguration {
  "declare-directive"?: { [name: string]: string };
  "output-artifact"?: Array<string> | string;
  "message-format"?: "json" | "yaml" | "regular";
  "use"?: any[];
  "use-extension"?: { [extensionName: string]: string };
  "require"?: Array<string> | string;
  "try-require"?: Array<string> | string;
  "help"?: any;
  "pass-thru"?: any[];
  "disable-validation"?: boolean;
  "cache"?: any;
  "vscode"?: any; // activates VS Code specific behavior and does *NOT* influence the core's behavior (only consumed by VS Code extension)

  "override-info"?: any; // make sure source maps are pulling it! (see "composite swagger" method)

@@ -71,37 +73,10 @@ export interface AutoRestRawConfiguration {
  "debugger"?: any;

  "github-auth-token"?: string;

  // TODO-TIM check what is this?
  "name"?: string;
  "to"?: string;

  [key: string]: any;
}

export const mergeConfigurations = (...configs: Array<AutoRestRawConfiguration>): AutoRestRawConfiguration => {
  let result: AutoRestRawConfiguration = {};
  configs = configs
    .map((each, i, a) => ({ ...each, "load-priority": each["load-priority"] || -i }))
    .sort((a, b) => b["load-priority"] - a["load-priority"]);
  // if they say --profile: or --api-version: (or in config) then we force it to set the tag=all-api-versions
  // Some of the rest specs had a default tag set (really shouldn't have done that), which ... was problematic,
  // so this enables us to override that in the case they are asking for filtering to a profile or an api-version

  const forceAllVersionsMode = !!configs.find((each) => each["api-version"]?.length || each.profile?.length || 0 > 0);
  for (const config of configs) {
    result = mergeConfiguration(result, config, forceAllVersionsMode);
  }
  result["load-priority"] = undefined;
  return result;
};

// TODO: operate on DataHandleRead and create source map!
export const mergeConfiguration = (
  higherPriority: AutoRestRawConfiguration,
  lowerPriority: AutoRestRawConfiguration,
  forceAllVersionsMode = false,
): AutoRestRawConfiguration => {
  // check guard
  if (lowerPriority.__info && !evaluateGuard(lowerPriority.__info, higherPriority, forceAllVersionsMode)) {
    // guard false? => skip
    return higherPriority;
  }

  // merge
  return MergeOverwriteOrAppend(higherPriority, lowerPriority);
};

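To make the priority handling above concrete, a small usage sketch follows. It assumes the helpers stay exported under the names shown (the import path here is hypothetical) and that MergeOverwriteOrAppend keeps overwrite-for-scalars semantics, so the merged values in the comments are illustrative rather than authoritative.

import { mergeConfigurations } from "./autorest-raw-configuration"; // hypothetical import path

// Later arguments get a lower implicit load-priority (-i), so the CLI-style config
// passed first wins over the readme block for conflicting keys.
const cli = { "output-folder": "generated-cli", "verbose": true };
const readme = { "output-folder": "generated", "tag": "package-2020-06" };

const merged = mergeConfigurations(cli, readme);
// Expected (illustrative): "output-folder" comes from `cli`, "tag" and "verbose" survive from
// whichever config defines them, and the temporary "load-priority" key is cleared afterwards.
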
@@ -0,0 +1,15 @@
import { ResolvedDirective } from "./directive";

describe("Directive", () => {
  it("resolve properties", () => {
    const resolved = new ResolvedDirective({
      from: "foo",
      transform: ["123", "456"],
    });

    expect(resolved.from).toEqual(["foo"]);
    expect(resolved.transform).toEqual(["123", "456"]);
    expect(resolved.reason).toEqual(undefined);
    expect(resolved.suppress).toEqual([]);
  });
});

@@ -1,31 +1,29 @@
import { Initializer } from "@azure-tools/codegen";
import { arrayOf } from "./utils";

export interface Directive {
  from?: Array<string> | string;
  where?: Array<string> | string;
  reason?: string;
  "from"?: string[] | string;
  "where"?: string[] | string;
  "reason"?: string;

  // one of:
  suppress?: Array<string> | string;
  set?: Array<string> | string;
  transform?: Array<string> | string;
  test?: Array<string> | string;
  "suppress"?: string[] | string;
  "set"?: string[] | string;
  "transform"?: string[] | string;
  "text-transform"?: string[] | string;
  "test"?: string[] | string;
}

export class ResolvedDirective extends Initializer {
  from: Array<string>;
  where: Array<string>;
export class ResolvedDirective {
  from: string[];
  where: string[];
  reason?: string;
  suppress: Array<string>;
  transform: Array<string>;
  test: Array<string>;
  suppress: string[];
  transform: string[];
  test: string[];

  constructor(directive: Directive) {
    super();

    // copy untyped content over
    this.apply(directive);
    Object.assign(this, directive);

    // normalize typed content
    this.from = arrayOf(directive["from"]);

@@ -0,0 +1,4 @@
export * from "./autorest-configuration";
export * from "./autorest-raw-configuration";
export * from "./directive";
export * from "./utils";

@@ -0,0 +1,27 @@
import { type } from "os";
import { arrayOf } from "./utils";

describe("Utils", () => {
  describe("arrayOf", () => {
    it("returns it as-is if it's already an array", () => {
      expect(arrayOf(["abc", "def"])).toEqual(["abc", "def"]);
    });

    it("wraps it in an array if it is only a primitive type", () => {
      expect(arrayOf("abc")).toEqual(["abc"]);
      expect(arrayOf(123)).toEqual([123]);
      expect(arrayOf(true)).toEqual([true]);
    });

    it("wraps it in an array if it is only an object type", () => {
      expect(arrayOf({ foo: "bar" })).toEqual([{ foo: "bar" }]);
    });

    // This test is just for types. The processed input should be assignable to the output variable.
    it("has correct type", () => {
      const input: string[] | string | undefined = <any>"foo";
      const output: string[] = arrayOf(input);
      expect(output).toBeDefined();
    });
  });
});

@@ -0,0 +1,25 @@
export function isIterable(target: any): target is Iterable<any> {
  return !!target && typeof target[Symbol.iterator] === "function";
}

/**
 * Takes a configuration value that can be either an array, a single value or empty and returns an array with all values.
 * @param value Value to wrap in an array.
 * @returns Array of all the values.
 */
export function arrayOf<T>(value: T | T[] | undefined): T[] {
  if (value === undefined) {
    return [];
  }

  switch (typeof value) {
    case "string": // Need to do this case as String is iterable.
      return [value];
    case "object":
      if (isIterable(value)) {
        return [...value];
      }
      break;
  }
  return [value];
}

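Since the doc comment above is terse, here is a brief usage sketch of the helper; the file names are made up and the relative import is assumed.

import { arrayOf } from "./utils"; // assumed relative path

arrayOf(undefined);            // => []             (missing config value)
arrayOf("readme.md");          // => ["readme.md"]  (strings are wrapped, not spread character by character)
arrayOf(["a.json", "b.json"]); // => ["a.json", "b.json"]  (arrays pass through as a copy)
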
@@ -0,0 +1,4 @@
{
  "extends": "./tsconfig.json",
  "exclude": ["**/*.test.*", "test/**/*"]
}

@@ -0,0 +1,7 @@
{
  "extends": "../../../tsconfig.json",
  "compilerOptions": {
    "outDir": "dist"
  },
  "include": ["src/**/*.ts", "test/**/*.ts"]
}

@@ -35,6 +35,7 @@
    "source-map-support": "^0.5.19"
  },
  "devDependencies": {
    "@types/node": "~14.14.20",
    "@types/source-map-support": "^0.5.3",
    "eslint-plugin-prettier": "~3.2.0",
    "eslint-plugin-unicorn": "~27.0.0",

@@ -84,6 +84,12 @@
      "reviewCategory": "production",
      "shouldPublish": true
    },
    {
      "packageName": "@autorest/configuration",
      "projectFolder": "packages/libs/configuration",
      "reviewCategory": "production",
      "shouldPublish": true
    },
    {
      "packageName": "@autorest/test-utils",
      "projectFolder": "packages/testing/test-utils",