Adding LU Parser folder and JS npm module (#6018)
This commit is contained in:
Parent: 0acb9e351f
Commit: 3d7cfc532f
@@ -0,0 +1,231 @@
lexer grammar LUFileLexer;

// fragments
fragment A: 'a' | 'A';
fragment B: 'b' | 'B';
fragment C: 'c' | 'C';
fragment D: 'd' | 'D';
fragment E: 'e' | 'E';
fragment F: 'f' | 'F';
fragment G: 'g' | 'G';
fragment H: 'h' | 'H';
fragment I: 'i' | 'I';
fragment J: 'j' | 'J';
fragment K: 'k' | 'K';
fragment L: 'l' | 'L';
fragment M: 'm' | 'M';
fragment N: 'n' | 'N';
fragment O: 'o' | 'O';
fragment P: 'p' | 'P';
fragment Q: 'q' | 'Q';
fragment R: 'r' | 'R';
fragment S: 's' | 'S';
fragment T: 't' | 'T';
fragment U: 'u' | 'U';
fragment V: 'v' | 'V';
fragment W: 'w' | 'W';
fragment X: 'x' | 'X';
fragment Y: 'y' | 'Y';
fragment Z: 'z' | 'Z';

fragment LETTER: 'a'..'z' | 'A'..'Z';
fragment NUMBER: '0'..'9';

fragment WHITESPACE
    : ' '|'\t'|'\ufeff'|'\u00a0'
    ;

fragment UTTERANCE_MARK: '-' | '*' | '+';

WS
    : WHITESPACE+
    ;

NEWLINE
    : '\r'? '\n' -> skip
    ;

QNA_SOURCE_INFO
    : WS* '>' WS* '!# @qna.pair.source' WS* '=' ~('\r'|'\n')+
    ;

MODEL_INFO
    : WS* '>' WS* '!#' ~('\r'|'\n')+
    ;

COMMENT
    : WS* '>' ~('\r'|'\n')* -> skip
    ;

QNA
    : '#'+ WS* '?' -> pushMode(QNA_MODE)
    ;

HASH
    : '#' -> pushMode(INTENT_NAME_MODE)
    ;

DASH
    : UTTERANCE_MARK -> pushMode(LIST_BODY_MODE)
    ;

DOLLAR
    : '$' -> pushMode(ENTITY_MODE)
    ;

AT
    : '@' -> pushMode(NEW_ENTITY_MODE)
    ;

IMPORT
    : '[' ~[\r\n[\]]*? ']' '(' ~[\r\n()]*? ')'
    ;

FILTER_MARK
    : '**' F I L T E R S ':**'
    ;

QNA_ID_MARK
    : '<a' .*? '</a>'
    ;

MULTI_LINE_TEXT
    : '```' .*? '```'
    ;
PROMPT_MARK
    : '**' P R O M P T S ':**'
    ;

INVALID_TOKEN_DEFAULT_MODE
    : .
    ;

mode NEW_ENTITY_MODE;

WS_IN_NEW_ENTITY
    : WS -> type(WS)
    ;

NEWLINE_IN_NEW_ENTITY
    : '\r'? '\n' -> type(NEWLINE), popMode
    ;

EQUAL
    : '='
    ;

COMMA
    : ','
    ;

HAS_ROLES_LABEL
    : H A S R O L E S?
    ;

HAS_FEATURES_LABEL
    : U S E S F E A T U R E S?
    ;

NEW_ENTITY_TYPE_IDENTIFIER
    : S I M P L E | L I S T | R E G E X | P R E B U I L T | C O M P O S I T E | M L | P A T T E R N A N Y | P H R A S E L I S T | I N T E N T
    ;

PHRASE_LIST_LABEL
    : '(' (~[\r\n])* ')'
    ;

NEW_COMPOSITE_ENTITY
    : '[' (~[\r\n{}[\]()])* ']'
    ;

NEW_REGEX_ENTITY
    : '/' (~[\r\n])*
    ;

NEW_ENTITY_IDENTIFIER
    : (~[ \t\r\n,;'"])+
    ;

NEW_ENTITY_IDENTIFIER_WITH_WS
    : ('\'' | '"') (~[\t\r\n,;'"])+ ('\'' | '"')
    ;

mode INTENT_NAME_MODE;

WS_IN_NAME
    : WS -> type(WS)
    ;

HASH_IN_NAME
    : '#' -> type(HASH)
    ;

NEWLINE_IN_NAME
    : '\r'? '\n' -> skip, popMode
    ;

IDENTIFIER
    : (LETTER | NUMBER | '_') (LETTER | NUMBER | '-' | '_')*
    ;

DOT
    : '.'
    ;

mode LIST_BODY_MODE;

WS_IN_LIST_BODY
    : WS -> type(WS)
    ;

NEWLINE_IN_LIST_BODY
    : '\r'? '\n' -> type(NEWLINE), popMode
    ;

ESCAPE_CHARACTER
    : '\\' ~[\r\n]?
    ;

EXPRESSION
    : '{' (~[\r\n{}] | ('{' ~[\r\n]* '}'))* '}'
    ;

TEXT
    : ~[ \t\r\n\\]+?
    ;

mode ENTITY_MODE;

WS_IN_ENTITY
    : WS -> type(WS)
    ;

NEWLINE_IN_ENTITY
    : '\r'? '\n' -> skip, popMode
    ;

COMPOSITE_ENTITY
    : '[' (~[\r\n{}[\]()])* ']'
    ;

REGEX_ENTITY
    : '/' (~[\r\n])*
    ;

ENTITY_TEXT
    : ~[ \t\r\n:]+
    ;

COLON_MARK
    : ':'
    ;

mode QNA_MODE;

NEWLINE_IN_QNA
    : '\r'? '\n' -> skip, popMode
    ;

QNA_TEXT
    : ~[\t\r\n]+
    ;
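
For reference, a hypothetical .lu fragment that exercises these lexer modes (the intent, utterance, entity, and QnA lines below are invented for illustration; annotations name the tokens produced):

    > !# @app.name = MyBot            <- MODEL_INFO
    # Greeting                        <- HASH, then INTENT_NAME_MODE
    - hello there {userName}          <- DASH (LIST_BODY_MODE), EXPRESSION
    @ ml userName hasRoles first      <- AT (NEW_ENTITY_MODE), HAS_ROLES_LABEL
    # ? what can you do               <- QNA, then QNA_MODE
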
@@ -0,0 +1,241 @@
parser grammar LUFileParser;

options { tokenVocab=LUFileLexer; }

file
    : paragraph+? EOF
    ;

paragraph
    : newline
    | nestedIntentSection
    | simpleIntentSection
    | entitySection
    | newEntitySection
    | importSection
    | qnaSection
    | modelInfoSection
    ;

// Treat EOF as a newline to handle file end gracefully.
// It's possible that the parser doesn't even need to handle NEWLINE,
// but until the syntax is finalized, we keep NEWLINE in the grammar.
newline
    : WS* (NEWLINE | EOF)
    ;

errorString
    : (WS|INVALID_TOKEN_DEFAULT_MODE)+
    ;

nestedIntentSection
    : nestedIntentNameLine nestedIntentBodyDefinition
    ;

nestedIntentNameLine
    : WS* HASH WS* nestedIntentName
    ;

nestedIntentName
    : nameIdentifier (WS|nameIdentifier)*
    ;

nameIdentifier
    : IDENTIFIER (DOT IDENTIFIER)*
    ;

nestedIntentBodyDefinition
    : subIntentDefinition+
    ;

subIntentDefinition
    : WS* HASH simpleIntentSection
    ;

simpleIntentSection
    : intentDefinition
    ;

intentDefinition
    : intentNameLine intentBody?
    ;

intentNameLine
    : WS* HASH HASH? WS* intentName
    ;

intentName
    : nameIdentifier (WS|nameIdentifier)*
    ;

intentBody
    : WS* normalIntentBody
    ;

normalIntentBody
    : WS* ((normalIntentString newline) | errorString)+
    ;

normalIntentString
    : WS* DASH (WS|TEXT|EXPRESSION|ESCAPE_CHARACTER)*
    ;

newEntitySection
    : newEntityDefinition
    ;

newEntityDefinition
    : newEntityLine newEntityListbody?
    ;

newEntityListbody
    : ((normalItemString newline) | errorString)+
    ;

newEntityLine
    : WS* AT WS* newEntityType? WS* (newEntityName|newEntityNameWithWS)? WS* newEntityRoles? WS* newEntityUsesFeatures? WS* EQUAL? WS* (newCompositeDefinition|newRegexDefinition)? newline
    ;

newCompositeDefinition
    : NEW_COMPOSITE_ENTITY
    ;

newRegexDefinition
    : NEW_REGEX_ENTITY
    ;

newEntityType
    : NEW_ENTITY_TYPE_IDENTIFIER
    ;

newEntityRoles
    : HAS_ROLES_LABEL? WS* newEntityRoleOrFeatures
    ;

newEntityUsesFeatures
    : HAS_FEATURES_LABEL WS* newEntityRoleOrFeatures
    ;

newEntityRoleOrFeatures
    : (NEW_ENTITY_IDENTIFIER|NEW_ENTITY_IDENTIFIER_WITH_WS) (WS* COMMA WS* (NEW_ENTITY_IDENTIFIER|NEW_ENTITY_IDENTIFIER_WITH_WS))*
    ;

newEntityName
    : NEW_ENTITY_IDENTIFIER (WS* PHRASE_LIST_LABEL)?
    ;

newEntityNameWithWS
    : NEW_ENTITY_IDENTIFIER_WITH_WS (WS* PHRASE_LIST_LABEL)?
    ;

entitySection
    : entityDefinition
    ;

entityDefinition
    : entityLine entityListBody?
    ;

entityLine
    : WS* DOLLAR entityName? COLON_MARK? entityType?
    ;

entityName
    : (ENTITY_TEXT|WS)+
    ;

entityType
    : (compositeEntityIdentifier|regexEntityIdentifier|ENTITY_TEXT|COLON_MARK|WS)+
    ;

compositeEntityIdentifier
    : COMPOSITE_ENTITY
    ;

regexEntityIdentifier
    : REGEX_ENTITY
    ;

entityListBody
    : ((normalItemString newline) | errorString)+
    ;

normalItemString
    : WS* DASH (WS|TEXT|EXPRESSION|ESCAPE_CHARACTER)*
    ;

importSection
    : importDefinition
    ;

importDefinition
    : WS* IMPORT WS*
    ;

qnaSection
    : qnaDefinition
    ;

qnaDefinition
    : qnaSourceInfo? qnaIdMark? qnaQuestion moreQuestionsBody qnaAnswerBody promptSection?
    ;

qnaSourceInfo
    : WS* QNA_SOURCE_INFO
    ;

qnaIdMark
    : WS* QNA_ID_MARK
    ;

qnaQuestion
    : WS* QNA questionText
    ;

questionText
    : QNA_TEXT*
    ;

moreQuestionsBody
    : WS* ((moreQuestion newline) | errorQuestionString)*
    ;

moreQuestion
    : DASH (WS|TEXT)*
    ;

errorQuestionString
    : (WS|INVALID_TOKEN_DEFAULT_MODE)+
    ;

qnaAnswerBody
    : ((filterSection? multiLineAnswer)|(multiLineAnswer filterSection?))
    ;

filterSection
    : WS* FILTER_MARK (filterLine | errorFilterLine)+
    ;

promptSection
    : WS* PROMPT_MARK (filterLine | errorFilterLine)+
    ;

filterLine
    : WS* DASH (WS|TEXT)* newline
    ;

errorFilterLine
    : (WS|INVALID_TOKEN_DEFAULT_MODE)+
    ;

multiLineAnswer
    : WS* MULTI_LINE_TEXT
    ;

modelInfoSection
    : modelInfoDefinition
    ;

modelInfoDefinition
    : WS* MODEL_INFO
    ;
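
A minimal sketch of driving the generated parser from JavaScript, assuming the standard antlr4 runtime package and generated LUFileLexer.js/LUFileParser.js modules in the same folder (the paths and module shape are assumptions; adjust to the actual build output):

    const antlr4 = require('antlr4');                    // ANTLR JavaScript runtime
    const { LUFileLexer } = require('./LUFileLexer');    // generated from LUFileLexer.g4 (path assumed)
    const { LUFileParser } = require('./LUFileParser');  // generated from LUFileParser.g4 (path assumed)

    // Tokenize, then parse starting from the grammar's entry rule "file"
    function parseLu(text) {
        const chars = new antlr4.InputStream(text);
        const lexer = new LUFileLexer(chars);
        const tokens = new antlr4.CommonTokenStream(lexer);
        const parser = new LUFileParser(tokens);
        parser.buildParseTrees = true;
        return parser.file();
    }

    const tree = parseLu('# Greeting\n- hello there\n');
    console.log(tree.getText());
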
@@ -0,0 +1,73 @@
steps:
- bash: |
    short_hash=`git rev-parse --short=7 HEAD` ## At least 7 digits, more if needed for uniqueness
    echo "Full git hash: $(Build.SourceVersion)"
    echo "Short git hash: $short_hash"
    echo "##vso[task.setvariable variable=short_hash]$short_hash" ## Store variable for subsequent steps
  workingDirectory: $(Build.SourcesDirectory)
  displayName: Get commit number

- powershell: |
    $date = (Get-Date).ToString("yyyyMMdd")
    echo "##vso[task.setvariable variable=buildDate]$date" ## Store variable for subsequent steps
  displayName: Get formatted date

- bash: |
    version=`echo "$(version)"`
    _version=`echo "$(_version)"`
    echo "version: $version"
    echo "_version: $_version"
    [ -z "$version" ] && buildVersion=$_version.$(buildDate).$(short_hash) || buildVersion=$version
    echo "buildVersion: $buildVersion"
    echo "##vso[task.setvariable variable=buildVersion]$buildVersion" ## Store variable for subsequent steps
  workingDirectory: $(Build.SourcesDirectory)
  displayName: Define Build Version
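# Worked example (hypothetical values): if "version" is unset at queue time while
# _version=4.11.0-dev, buildDate=20201117 and short_hash=3d7cfc5, the step above
# produces buildVersion=4.11.0-dev.20201117.3d7cfc5; a non-empty "version" wins outright.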

- task: colinsalmcorner.colinsalmcorner-buildtasks.tag-build-task.tagBuildOrRelease@0
  displayName: 'Tag Build with version number'
  inputs:
    tags: 'Version=$(buildVersion)'
  continueOnError: true

- task: NodeTool@0
  displayName: 'Use Node 12.x'
  inputs:
    versionSpec: 12.x

- task: Npm@1
  displayName: 'npm install --global @microsoft/rush'
  inputs:
    command: custom
    verbose: false
    customCommand: 'install --global @microsoft/rush'

- script: 'rush update'
  displayName: 'rush update'

- script: 'rush build -p 2'
  displayName: 'rush build -p 2'

- script: 'rush test'
  displayName: 'rush test'

- script: 'node ./common/scripts/version-and-pack.js --version $(buildVersion)'
  displayName: 'Version and Pack'

- task: CopyFiles@2
  displayName: 'Copy packages to: $(Build.ArtifactStagingDirectory)/drop'
  inputs:
    SourceFolder: ./.output
    Contents: '**/*.tgz'
    TargetFolder: '$(Build.ArtifactStagingDirectory)/drop'
    flattenFolders: true

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'
  inputs:
    PathtoPublish: '$(Build.ArtifactStagingDirectory)/drop'
    ArtifactName: drop

- powershell: 'Get-ChildItem .. -ErrorAction Continue -Recurse -Force | Where {$_.FullName -notlike "*node_modules*"}'
  displayName: 'Dir workspace except node_modules'
  continueOnError: true
  condition: succeededOrFailed()
@@ -0,0 +1,24 @@
#
# Build Botframework-CLI Azure DevOps bits on Windows agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  name: Hosted Windows 2019 with VS2019

pr: none
trigger: none

variables:
- template: botframework-cli-version.yml # Template reference ${{ variables.releaseVersion }}
# version: define this in Azure, settable at queue time

stages:
- stage: Build
  jobs:
  - job:
    steps:
    - script: echo '##vso[task.setvariable variable=_version]${{ variables.releaseVersion }}-devops'
    - template: bf-cli-build-test-steps.yml
@@ -0,0 +1,31 @@
#
# Build Botframework-CLI daily bits on Windows agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  name: Hosted Windows 2019 with VS2019

pr: none
trigger: none

schedules:
- cron: "0 7 * * *"
  displayName: Daily midnight build for BETA branch (UTC Time)
  branches:
    include:
    - beta

variables:
- template: botframework-cli-version.yml # Template reference ${{ variables.releaseVersion }}
# version: define this in Azure, settable at queue time

stages:
- stage: Build
  jobs:
  - job:
    steps:
    - script: echo '##vso[task.setvariable variable=_version]${{ variables.releaseVersion }}-beta'
    - template: bf-cli-build-test-steps.yml
@@ -0,0 +1,31 @@
#
# Build Botframework-CLI daily bits on Windows agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  name: Hosted Windows 2019 with VS2019

pr: none
trigger: none

schedules:
- cron: "0 7 * * *"
  displayName: Daily midnight build (UTC Time)
  branches:
    include:
    - main

variables:
- template: botframework-cli-version.yml # Template reference ${{ variables.releaseVersion }}
# version: define this in Azure, settable at queue time

stages:
- stage: Build
  jobs:
  - job:
    steps:
    - script: echo '##vso[task.setvariable variable=_version]${{ variables.releaseVersion }}-dev'
    - template: bf-cli-build-test-steps.yml
@@ -0,0 +1,37 @@
#
# Build Botframework-CLI on Mac (Linux) agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  vmImage: 'macOS-10.15'

pr:
  branches:
    include:
    - main
  paths:
    include:
    - '*'
    exclude:
    - README.md
    - specs/*
    - PortingMap.md
    - ToolLifetimeSchedule.md
    - AzureCli.md
    - CONTRIBUTING.md
    - LICENSE
    - PRIVACY.md

variables:
  version: '1.0.0'
# version: define this in Azure, settable at queue time

stages:
- stage: Build
  jobs:
  - job:
    steps:
    - template: bf-cli-build-test-steps.yml
@@ -0,0 +1,24 @@
#
# Build Botframework-CLI RC bits on Windows agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  name: Hosted Windows 2019 with VS2019

pr: none
trigger: none

variables:
- template: botframework-cli-version.yml # Template reference ${{ variables.releaseVersion }}
# version: define this in Azure, settable at queue time

stages:
- stage: Build
  jobs:
  - job:
    steps:
    - script: echo '##vso[task.setvariable variable=_version]${{ variables.releaseVersion }}-rc'
    - template: bf-cli-build-test-steps.yml
@@ -0,0 +1,2 @@
variables:
  releaseVersion: '4.11.0'
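# The pipelines above consume this value via the botframework-cli-version.yml template
# and append a channel suffix (-dev, -beta, -rc, -devops) to form _version.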
@@ -0,0 +1,115 @@
#
# Build Botframework-CLI on Windows agent
#

# "name" here defines the build number format. Build number is accessed via $(Build.BuildNumber)
name: $(Build.BuildId)

pool:
  name: Hosted Windows 2019 with VS2019

pr:
  branches:
    include:
    - main

jobs:
- job: CLI
  variables:
    buildVersion: '4.10.0-preview.$(Build.BuildId)'
    _version: ${{coalesce(variables.version, variables.buildVersion)}}
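    # coalesce() returns the first non-empty argument: a queue-time "version" if one
    # was provided, otherwise the preview buildVersion defined above.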

  steps:
  - task: colinsalmcorner.colinsalmcorner-buildtasks.tag-build-task.tagBuildOrRelease@0
    displayName: 'Tag Build with version number'
    inputs:
      tags: 'Version=$(_version)'
    continueOnError: true

  - task: NodeTool@0
    displayName: 'Use Node 12.x'
    inputs:
      versionSpec: 12.x

  - task: Npm@1
    displayName: 'npm install --global @microsoft/rush'
    inputs:
      command: custom
      verbose: false
      customCommand: 'install --global @microsoft/rush'

  - script: 'rush update'
    displayName: 'rush update'

  - script: 'rush build -p 2'
    displayName: 'rush build -p 2'

  - script: 'rush coverage -v'
    displayName: 'rush coverage -v'

  - bash: 'bash <(curl -s https://codecov.io/bash)'
    displayName: 'push coverage report to codecov.io - https://codecov.io/github/microsoft/botframework-cli'
    env:
      CODECOV_TOKEN: $(TokenForCodecov)

  - powershell: |
      # If commit Build.SourceVersion exists in Github, we can show a nicer codecov.io URL
      $result = $(git rev-list HEAD..$(Build.SourceVersion) 2>&1);
      if ($result -like "*fatal*") { $Url = "https://codecov.io/github/microsoft/botframework-cli" }
      else { $Url = "https://codecov.io/github/microsoft/botframework-cli/commit/$(Build.SourceVersion)" };
      Write-Host "##vso[task.setvariable variable=CodecovUrl;]$Url"
    displayName: 'Set CodecovUrl'
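  # ("git rev-list HEAD..<commit>" prints "fatal: ..." when the commit is unknown to the
  # local clone, so the step above falls back to the repo-level codecov.io URL.)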

  - task: colinsalmcorner.colinsalmcorner-buildtasks.tag-build-task.tagBuildOrRelease@0
    displayName: 'Tag Build with coverage url'
    inputs:
      tags: '$(CodecovUrl)'
    continueOnError: true

  - task: PublishCodeCoverageResults@1
    displayName: 'Populate Code Coverage tab'
    inputs:
      codeCoverageTool: 'cobertura' # Options: cobertura, jaCoCo
      summaryFileLocation: $(System.DefaultWorkingDirectory)/packages/*/coverage/cobertura-coverage.xml
    continueOnError: true

  - task: CopyFiles@2
    displayName: 'Copy coverage files to: $(Build.StagingDirectory)/coverage'
    inputs:
      SourceFolder: '$(Build.SourcesDirectory)/packages'
      Contents: '*/coverage/**'
      TargetFolder: '$(Build.StagingDirectory)/coverage'

  - task: PublishBuildArtifacts@1
    displayName: 'Publish Artifact: coverage'
    inputs:
      PathtoPublish: '$(Build.ArtifactStagingDirectory)/coverage'
      ArtifactName: coverage

  - script: 'rush posttest'
    displayName: 'rush posttest'

  - script: 'node ./common/scripts/version-and-pack.js --version $(_version)'
    displayName: 'Version and Pack'

  - task: CopyFiles@2
    displayName: 'Copy packages to: $(Build.ArtifactStagingDirectory)/drop'
    inputs:
      SourceFolder: ./.output
      Contents: '**/*.tgz'
      TargetFolder: '$(Build.ArtifactStagingDirectory)/drop'
      flattenFolders: true

  - task: PublishBuildArtifacts@1
    displayName: 'Publish Artifact: drop'
    inputs:
      PathtoPublish: '$(Build.ArtifactStagingDirectory)/drop'
      ArtifactName: drop

  - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
    displayName: 'Component Detection'

  - powershell: 'Get-ChildItem .. -ErrorAction Continue -Recurse -Force | Where {$_.FullName -notlike "*node_modules*"}'
    displayName: 'Dir workspace except node_modules'
    continueOnError: true
    condition: succeededOrFailed()
@@ -0,0 +1,12 @@
# Rush uses this file to configure the package registry, regardless of whether the
# package manager is PNPM, NPM, or Yarn. Prior to invoking the package manager,
# Rush will always copy this file to the folder where installation is performed.
# When NPM is the package manager, Rush works around NPM's processing of
# undefined environment variables by deleting any lines that reference undefined
# environment variables.
#
# DO NOT SPECIFY AUTHENTICATION CREDENTIALS IN THIS FILE. It should only be used
# to configure registry sources.

registry=https://registry.npmjs.org/
always-auth=false
@@ -0,0 +1,273 @@
/**
 * This configuration file defines custom commands for the "rush" command-line.
 * For full documentation, please see https://rushjs.io
 */
{
  "$schema": "https://developer.microsoft.com/json-schemas/rush/v5/command-line.schema.json",

  /**
   * Custom "commands" introduce new verbs for the command-line. To see the help for these
   * example commands, try "rush --help", "rush my-bulk-command --help", or
   * "rush my-global-command --help".
   */
  "commands": [
    {
      "commandKind": "bulk",
      "name": "test",
      "summary": "Run all tests",
      "description": "This runs all tests. NOTE: You must build at least once for tests to run. If you modify a dependency and want to test that behavior change, you must also run build. We don't automatically rebuild to optimize for speed of single package development.",
      "enableParallelism": true,
      "allowWarningsInSuccessfulBuild": true
    },
    {
      "commandKind": "bulk",
      "name": "clean",
      "summary": "Cleans non-rush content. Use rush unlink/purge/tableflip.",
      "enableParallelism": true
    },
    {
      "commandKind": "global",
      "name": "pack",
      "summary": "Create .tgz packages for publishing in CI",
      "description": "This doesn't currently work because rush doesn't allow the passing of arbitrary commands like version numbers yet. \n Use node ./common/scripts/version-and-pack.js --version X.X.X-tag instead.",
      "shellCommand": "node -e \"throw new Error(`Not supported yet. \\nUse node ./common/scripts/version-and-pack.js --version X.X.X-tag instead`)\""
    },
    /**
     * Disable parallelism because it makes the results flakier
     */
    {
      "commandKind": "bulk",
      "name": "coverage",
      "summary": "Run tests with coverage",
      "enableParallelism": false,
      "allowWarningsInSuccessfulBuild": true
    },
    {
      "commandKind": "bulk",
      "name": "doc",
      "summary": "Generate documentation",
      "enableParallelism": true
    },
    {
      "commandKind": "global",
      "name": "tableflip",
      "summary": "clean everything up",
      "shellCommand": "rush unlink & rush purge & npx rimraf ./node_modules ./packages/chatdown/node_modules ./packages/cli/node_modules ./packages/command/node_modules ./packages/config/node_modules ./packages/dialog/node_modules ./packages/lu/node_modules ./packages/luis/node_modules ./packages/qnamaker/node_modules & rush update",
      "safeForSimultaneousRushProcesses": true
    },
    {
      "commandKind": "bulk",
      "name": "posttest",
      "summary": "Run all posttest scripts",
      "description": "This runs all posttest scripts, which perform linting",
      "enableParallelism": true,
      "allowWarningsInSuccessfulBuild": true
    }
    // {
    // /**
    // * (Required) Determines the type of custom command.
    // * Rush's "bulk" commands are invoked separately for each project. Rush will look in
    // * each project's package.json file for a "scripts" entry whose name matches the
    // * command name. By default, the command will run for every project in the repo,
    // * according to the dependency graph (similar to how "rush build" works).
    // * The set of projects can be restricted e.g. using the "--to" or "--from" parameters.
    // */
    // "commandKind": "bulk",
    //
    // /**
    // * (Required) The name that will be typed as part of the command line. This is also the name
    // * of the "scripts" hook in the project's package.json file.
    // * The name should be comprised of lower case words separated by hyphens or colons. The name should include an
    // * English verb (e.g. "deploy"). Use a hyphen to separate words (e.g. "upload-docs"). A group of related commands
    // * can be prefixed with a colon (e.g. "docs:generate", "docs:deploy", "docs:serve", etc).
    // */
    // "name": "my-bulk-command",
    //
    // /**
    // * (Required) A short summary of the custom command to be shown when printing command line
    // * help, e.g. "rush --help".
    // */
    // "summary": "Example bulk custom command",
    //
    // /**
    // * A detailed description of the command to be shown when printing command line
    // * help (e.g. "rush --help my-command").
    // * If omitted, the "summary" text will be shown instead.
    // *
    // * Whenever you introduce commands/parameters, taking a little time to write meaningful
    // * documentation can make a big difference for the developer experience in your repo.
    // */
    // "description": "This is an example custom command that runs separately for each project",
    //
    // /**
    // * By default, Rush operations acquire a lock file which prevents multiple commands from executing simultaneously
    // * in the same repo folder. (For example, it would be a mistake to run "rush install" and "rush build" at the
    // * same time.) If your command makes sense to run concurrently with other operations,
    // * set "safeForSimultaneousRushProcesses" to true to disable this protection.
    // *
    // * In particular, this is needed for custom scripts that invoke other Rush commands.
    // */
    // "safeForSimultaneousRushProcesses": false,
    //
    // /**
    // * (Required) If true, then this command is safe to be run in parallel, i.e. executed
    // * simultaneously for multiple projects. Similar to "rush build", regardless of parallelism
    // * projects will not start processing until their dependencies have completed processing.
    // */
    // "enableParallelism": false,
    //
    // /**
    // * Normally projects will be processed according to their dependency order: a given project will not start
    // * processing the command until all of its dependencies have completed. This restriction doesn't apply for
    // * certain operations, for example a "clean" task that deletes output files. In this case
    // * you can set "ignoreDependencyOrder" to true to increase parallelism.
    // */
    // "ignoreDependencyOrder": false,
    //
    // /**
    // * Normally Rush requires that each project's package.json has a "scripts" entry matching
    // * the custom command name. To disable this check, set "ignoreMissingScript" to true;
    // * projects with a missing definition will be skipped.
    // */
    // "ignoreMissingScript": false,
    //
    // /**
    // * When invoking shell scripts, Rush uses a heuristic to distinguish errors from warnings:
    // * - If the shell script returns a nonzero process exit code, Rush interprets this as "one or more errors".
    // * Error output is displayed in red, and it prevents Rush from attempting to process any downstream projects.
    // * - If the shell script returns a zero process exit code but writes something to its stderr stream,
    // * Rush interprets this as "one or more warnings". Warning output is printed in yellow, but does NOT prevent
    // * Rush from processing downstream projects.
    // *
    // * Thus, warnings do not interfere with local development, but they will cause a CI job to fail, because
    // * the Rush process itself returns a nonzero exit code if there are any warnings or errors. This is by design.
    // * In an active monorepo, we've found that if you allow any warnings in your master branch, it inadvertently
    // * teaches developers to ignore warnings, which quickly leads to a situation where so many "expected" warnings
    // * have accumulated that warnings no longer serve any useful purpose.
    // *
    // * Sometimes a poorly behaved task will write output to stderr even though its operation was successful.
    // * In that case, it's strongly recommended to fix the task. However, as a workaround you can set
    // * allowWarningsInSuccessfulBuild=true, which causes Rush to return a nonzero exit code for errors only.
    // *
    // * Note: The default value is false. In Rush 5.7.x and earlier, the default value was true.
    // */
    // "allowWarningsInSuccessfulBuild": false
    // },
    //
    // {
    // /**
    // * (Required) Determines the type of custom command.
    // * Rush's "global" commands are invoked once for the entire repo.
    // */
    // "commandKind": "global",
    //
    // "name": "my-global-command",
    // "summary": "Example global custom command",
    // "description": "This is an example custom command that runs once for the entire repo",
    //
    // "safeForSimultaneousRushProcesses": false,
    //
    // /**
    // * A script that will be invoked using the OS shell. The working directory will be the folder
    // * that contains rush.json. If custom parameters are associated with this command, their
    // * values will be appended to the end of this string.
    // */
    // "shellCommand": "node common/scripts/my-global-command.js"
    // }
  ],

  /**
   * Custom "parameters" introduce new parameters for specified Rush command-line commands.
   * For example, you might define a "--production" parameter for the "rush build" command.
   */
  "parameters": [
    // {
    // /**
    // * (Required) Determines the type of custom parameter.
    // * A "flag" is a custom command-line parameter whose presence acts as an on/off switch.
    // */
    // "parameterKind": "flag",
    //
    // /**
    // * (Required) The long name of the parameter. It must be lower-case and use dash delimiters.
    // */
    // "longName": "--my-flag",
    //
    // /**
    // * An optional alternative short name for the parameter. It must be a dash followed by a single
    // * lower-case or upper-case letter, which is case-sensitive.
    // *
    // * NOTE: The Rush developers recommend that automation scripts should always use the long name
    // * to improve readability. The short name is only intended as a convenience for humans.
    // * The alphabet letters run out quickly, and are difficult to memorize, so *only* use
    // * a short name if you expect the parameter to be needed very often in everyday operations.
    // */
    // "shortName": "-m",
    //
    // /**
    // * (Required) A long description to be shown in the command-line help.
    // *
    // * Whenever you introduce commands/parameters, taking a little time to write meaningful
    // * documentation can make a big difference for the developer experience in your repo.
    // */
    // "description": "A custom flag parameter that is passed to the scripts that are invoked when building projects",
    //
    // /**
    // * (Required) A list of custom commands and/or built-in Rush commands that this parameter may
    // * be used with. The parameter will be appended to the shell command that Rush invokes.
    // */
    // "associatedCommands": [ "build", "rebuild" ]
    // },
    //
    // {
    // /**
    // * (Required) Determines the type of custom parameter.
    // * A "flag" is a custom command-line parameter whose presence acts as an on/off switch.
    // */
    // "parameterKind": "choice",
    // "longName": "--my-choice",
    // "description": "A custom choice parameter for the \"my-global-command\" custom command",
    //
    // "associatedCommands": [ "my-global-command" ],
    //
    // /**
    // * Normally if a parameter is omitted from the command line, it will not be passed
    // * to the shell command. Whereas if a "defaultValue"
    // * is defined, the parameter will always be passed to the shell command, and will use the
    // * default value if unspecified. The value must be one of the defined alternatives.
    // */
    // "defaultValue": "vanilla",
    //
    // /**
    // * (Required) A list of alternative argument values that can be chosen for this parameter.
    // */
    // "alternatives": [
    // {
    // /**
    // * A token that is one of the alternatives that can be used with the choice parameter,
    // * e.g. "vanilla" in "--flavor vanilla".
    // */
    // "name": "vanilla",
    //
    // /**
    // * A detailed description for the alternative that can be shown in the command-line help.
    // *
    // * Whenever you introduce commands/parameters, taking a little time to write meaningful
    // * documentation can make a big difference for the developer experience in your repo.
    // */
    // "description": "Use the vanilla flavor (the default)"
    // },
    //
    // {
    // "name": "chocolate",
    // "description": "Use the chocolate flavor"
    // },
    //
    // {
    // "name": "strawberry",
    // "description": "Use the strawberry flavor"
    // }
    // ]
    // }
  ]
}
@@ -0,0 +1,46 @@
/**
 * This configuration file specifies NPM dependency version selections that affect all projects
 * in a Rush repo. For full documentation, please see https://rushjs.io
 */
{
  "$schema": "https://developer.microsoft.com/json-schemas/rush/v5/common-versions.schema.json",

  /**
   * A table that specifies a "preferred version" for a dependency package. The "preferred version"
   * is typically used to hold an indirect dependency back to a specific version, however generally
   * it can be any SemVer range specifier (e.g. "~1.2.3"), and it will narrow any (compatible)
   * SemVer range specifier. See the Rush documentation for details about this feature.
   *
   * After modifying this field, it's recommended to run "rush update --full" so that the package manager
   * will recalculate all version selections.
   */
  "preferredVersions": {

    /**
     * When someone asks for "^1.0.0" make sure they get "1.2.3" when working in this repo,
     * instead of the latest version.
     */
    // "some-library": "1.2.3"
  },

  /**
   * The "rush check" command can be used to enforce that every project in the repo must specify
   * the same SemVer range for a given dependency. However, sometimes exceptions are needed.
   * The allowedAlternativeVersions table allows you to list other SemVer ranges that will be
   * accepted by "rush check" for a given dependency.
   *
   * IMPORTANT: THIS TABLE IS FOR *ADDITIONAL* VERSION RANGES THAT ARE ALTERNATIVES TO THE
   * USUAL VERSION (WHICH IS INFERRED BY LOOKING AT ALL PROJECTS IN THE REPO).
   * This design avoids unnecessary churn in this file.
   */
  "allowedAlternativeVersions": {

    /**
     * For example, allow some projects to use an older TypeScript compiler
     * (in addition to whatever "usual" version is being used by other projects in the repo):
     */
    // "typescript": [
    //   "~2.4.0"
    // ]
  }
}
@@ -0,0 +1,16 @@
/**
 * This configuration file allows repo maintainers to enable and disable experimental
 * Rush features. For full documentation, please see https://rushjs.io
 */
{
  "$schema": "https://developer.microsoft.com/json-schemas/rush/v5/experiments.schema.json",

  /**
   * Rush 5.14.0 improved incremental builds to ignore spurious changes in the pnpm-lock.json file.
   * This optimization is enabled by default. If you encounter a problem where "rush build" is neglecting
   * to build some projects, please open a GitHub issue. As a workaround you can uncomment this line
   * to temporarily restore the old behavior where everything must be rebuilt whenever pnpm-lock.json
   * is modified.
   */
  "legacyIncrementalBuildDependencyDetection": false
}
File diff suppressed because it is too large
@@ -0,0 +1,38 @@
"use strict";

/**
 * When using the PNPM package manager, you can use pnpmfile.js to workaround
 * dependencies that have mistakes in their package.json file. (This feature is
 * functionally similar to Yarn's "resolutions".)
 *
 * For details, see the PNPM documentation:
 * https://pnpm.js.org/docs/en/hooks.html
 *
 * IMPORTANT: SINCE THIS FILE CONTAINS EXECUTABLE CODE, MODIFYING IT IS LIKELY TO INVALIDATE
 * ANY CACHED DEPENDENCY ANALYSIS. After any modification to pnpmfile.js, it's recommended to run
 * "rush update --full" so that PNPM will recalculate all version selections.
 */
module.exports = {
  hooks: {
    readPackage
  }
};

/**
 * This hook is invoked during installation before a package's dependencies
 * are selected.
 * The `packageJson` parameter is the deserialized package.json
 * contents for the package that is about to be installed.
 * The `context` parameter provides a log() function.
 * The return value is the updated object.
 */
function readPackage(packageJson, context) {

  // // The karma types have a missing dependency on typings from the log4js package.
  // if (packageJson.name === '@types/karma') {
  //  context.log('Fixed up dependencies for @types/karma');
  //  packageJson.dependencies['log4js'] = '0.6.38';
  // }

  return packageJson;
}
@@ -0,0 +1,90 @@
/**
 * This configuration file is used for advanced publishing configurations with Rush.
 * For full documentation, please see https://rushjs.io
 */

/**
 * A list of version policy definitions. A "version policy" is a custom package versioning
 * strategy that affects "rush change", "rush version", and "rush publish". The strategy applies
 * to a set of projects that are specified using the "versionPolicyName" field in rush.json.
 */
[
  // {
  // /**
  // * (Required) Indicates the kind of version policy being defined ("lockStepVersion" or "individualVersion").
  // *
  // * The "lockStepVersion" mode specifies that the projects will use "lock-step versioning". This
  // * strategy is appropriate for a set of packages that act as selectable components of a
  // * unified product. The entire set of packages are always published together, and always share
  // * the same NPM version number. When the packages depend on other packages in the set, the
  // * SemVer range is usually restricted to a single version.
  // */
  // "definitionName": "lockStepVersion",
  //
  // /**
  // * (Required) The name that will be used for the "versionPolicyName" field in rush.json.
  // * This name is also used with command-line parameters such as "--version-policy"
  // * and "--to-version-policy".
  // */
  // "policyName": "MyBigFramework",
  //
  // /**
  // * (Required) The current version. All packages belonging to the set should have this version
  // * in the current branch. When bumping versions, Rush uses this to determine the next version.
  // * (The "version" field in package.json is NOT considered.)
  // */
  // "version": "1.0.0",
  //
  // /**
  // * (Required) The type of bump that will be performed when publishing the next release.
  // * When creating a release branch in Git, this field should be updated according to the
  // * type of release.
  // *
  // * Valid values are: "prerelease", "release", "minor", "patch", "major"
  // */
  // "nextBump": "prerelease",
  //
  // /**
  // * (Optional) If specified, all packages in the set share a common CHANGELOG.md file.
  // * This file is stored with the specified "main" project, which must be a member of the set.
  // *
  // * If this field is omitted, then a separate CHANGELOG.md file will be maintained for each
  // * package in the set.
  // */
  // "mainProject": "my-app"
  // },
  //
  // {
  // /**
  // * (Required) Indicates the kind of version policy being defined ("lockStepVersion" or "individualVersion").
  // *
  // * The "individualVersion" mode specifies that the projects will use "individual versioning".
  // * This is the typical NPM model where each package has an independent version number
  // * and CHANGELOG.md file. Although a single CI definition is responsible for publishing the
  // * packages, they otherwise don't have any special relationship. The version bumping will
  // * depend on how developers answer the "rush change" questions for each package that
  // * is changed.
  // */
  // "definitionName": "individualVersion",
  //
  // "policyName": "MyRandomLibraries",
  //
  // /**
  // * (Optional) This can be used to enforce that all packages in the set must share a common
  // * major version number, e.g. because they are from the same major release branch.
  // * It can also be used to discourage people from accidentally making "MAJOR" SemVer changes
  // * inappropriately. The minor/patch version parts will be bumped independently according
  // * to the types of changes made to each project, according to the "rush change" command.
  // */
  // "lockedMajor": 3,
  //
  // /**
  // * (Optional) When publishing is managed by Rush, by default the "rush change" command will
  // * request changes for any projects that are modified by a pull request. These change entries
  // * will produce a CHANGELOG.md file. If you author your CHANGELOG.md manually or announce updates
  // * in some other way, set "exemptFromRushChange" to true to tell "rush change" to ignore the projects
  // * belonging to this version policy.
  // */
  // "exemptFromRushChange": false
  // }
]
@@ -0,0 +1,67 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where the Rush command may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the version of Rush
// specified in the rush.json configuration file (if not already installed), and then pass a command-line to it.
// An example usage would be:
//
//    node common/scripts/install-run-rush.js install
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
const path = require("path");
const fs = require("fs");
const install_run_1 = require("./install-run");
const PACKAGE_NAME = '@microsoft/rush';
const RUSH_PREVIEW_VERSION = 'RUSH_PREVIEW_VERSION';
function _getRushVersion() {
    const rushPreviewVersion = process.env[RUSH_PREVIEW_VERSION];
    if (rushPreviewVersion !== undefined) {
        console.log(`Using Rush version from environment variable ${RUSH_PREVIEW_VERSION}=${rushPreviewVersion}`);
        return rushPreviewVersion;
    }
    const rushJsonFolder = install_run_1.findRushJsonFolder();
    const rushJsonPath = path.join(rushJsonFolder, install_run_1.RUSH_JSON_FILENAME);
    try {
        const rushJsonContents = fs.readFileSync(rushJsonPath, 'utf-8');
        // Use a regular expression to parse out the rushVersion value because rush.json supports comments,
        // but JSON.parse does not and we don't want to pull in more dependencies than we need to in this script.
        const rushJsonMatches = rushJsonContents.match(/\"rushVersion\"\s*\:\s*\"([0-9a-zA-Z.+\-]+)\"/);
        return rushJsonMatches[1];
    }
    catch (e) {
        throw new Error(`Unable to determine the required version of Rush from rush.json (${rushJsonFolder}). ` +
            'The \'rushVersion\' field is either not assigned in rush.json or was specified ' +
            'using an unexpected syntax.');
    }
}
function _run() {
    const [nodePath, /* Ex: /bin/node */ scriptPath, /* /repo/common/scripts/install-run-rush.js */ ...packageBinArgs /* [build, --to, myproject] */] = process.argv;
    // Detect if this script was directly invoked, or if the install-run-rushx script was invoked to select the
    // appropriate binary inside the rush package to run
    const scriptName = path.basename(scriptPath);
    const bin = scriptName.toLowerCase() === 'install-run-rushx.js' ? 'rushx' : 'rush';
    if (!nodePath || !scriptPath) {
        throw new Error('Unexpected exception: could not detect node path or script path');
    }
    if (process.argv.length < 3) {
        console.log(`Usage: ${scriptName} <command> [args...]`);
        if (scriptName === 'install-run-rush.js') {
            console.log(`Example: ${scriptName} build --to myproject`);
        }
        else {
            console.log(`Example: ${scriptName} custom-command`);
        }
        process.exit(1);
    }
    install_run_1.runWithErrorAndStatusCode(() => {
        const version = _getRushVersion();
        console.log(`The rush.json configuration requests Rush version ${version}`);
        return install_run_1.installAndRun(PACKAGE_NAME, version, bin, packageBinArgs);
    });
}
_run();
//# sourceMappingURL=install-run-rush.js.map
@@ -0,0 +1,18 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where the Rush command may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the version of Rush
// specified in the rush.json configuration file (if not already installed), and then pass a command-line to the
// rushx command.
//
// An example usage would be:
//
//    node common/scripts/install-run-rushx.js custom-command
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
require("./install-run-rush");
//# sourceMappingURL=install-run-rushx.js.map
@@ -0,0 +1,423 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where a Node tool may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the specified
// version of the specified tool (if not already installed), and then pass a command-line to it.
// An example usage would be:
//
//    node common/scripts/install-run.js qrcode@1.2.2 qrcode https://rushjs.io
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
const childProcess = require("child_process");
const fs = require("fs");
const os = require("os");
const path = require("path");
exports.RUSH_JSON_FILENAME = 'rush.json';
const RUSH_TEMP_FOLDER_ENV_VARIABLE_NAME = 'RUSH_TEMP_FOLDER';
const INSTALLED_FLAG_FILENAME = 'installed.flag';
const NODE_MODULES_FOLDER_NAME = 'node_modules';
const PACKAGE_JSON_FILENAME = 'package.json';
/**
 * Parse a package specifier (in the form of name\@version) into name and version parts.
 */
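// Illustrative inputs (hypothetical) and the results produced by the branches below:
//   "qrcode@1.2.2"    -> { name: "qrcode", version: "1.2.2" }
//   "@microsoft/rush" -> { name: "@microsoft/rush", version: undefined }  (leading "@" is a scope)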
function _parsePackageSpecifier(rawPackageSpecifier) {
    rawPackageSpecifier = (rawPackageSpecifier || '').trim();
    const separatorIndex = rawPackageSpecifier.lastIndexOf('@');
    let name;
    let version = undefined;
    if (separatorIndex === 0) {
        // The specifier starts with a scope and doesn't have a version specified
        name = rawPackageSpecifier;
    }
    else if (separatorIndex === -1) {
        // The specifier doesn't have a version
        name = rawPackageSpecifier;
    }
    else {
        name = rawPackageSpecifier.substring(0, separatorIndex);
        version = rawPackageSpecifier.substring(separatorIndex + 1);
    }
    if (!name) {
        throw new Error(`Invalid package specifier: ${rawPackageSpecifier}`);
    }
    return { name, version };
}
/**
 * As a workaround, _syncNpmrc() copies the .npmrc file to the target folder, and also trims
 * unusable lines from the .npmrc file. If the source .npmrc file does not exist, then _syncNpmrc()
 * will delete an .npmrc that is found in the target folder.
 *
 * Why are we trimming the .npmrc lines? NPM allows environment variables to be specified in
 * the .npmrc file to provide different authentication tokens for different registries.
 * However, if the environment variable is undefined, it expands to an empty string, which
 * produces a valid-looking mapping with an invalid URL that causes an error. Instead,
 * we'd prefer to skip that line and continue looking in other places such as the user's
 * home directory.
 *
 * IMPORTANT: THIS CODE SHOULD BE KEPT UP TO DATE WITH Utilities._syncNpmrc()
 */
function _syncNpmrc(sourceNpmrcFolder, targetNpmrcFolder) {
    const sourceNpmrcPath = path.join(sourceNpmrcFolder, '.npmrc');
    const targetNpmrcPath = path.join(targetNpmrcFolder, '.npmrc');
    try {
        if (fs.existsSync(sourceNpmrcPath)) {
            let npmrcFileLines = fs.readFileSync(sourceNpmrcPath).toString().split('\n');
            npmrcFileLines = npmrcFileLines.map((line) => (line || '').trim());
            const resultLines = [];
            // Trim out lines that reference environment variables that aren't defined
            for (const line of npmrcFileLines) {
                // This finds environment variable tokens that look like "${VAR_NAME}"
                const regex = /\$\{([^\}]+)\}/g;
                const environmentVariables = line.match(regex);
                let lineShouldBeTrimmed = false;
                if (environmentVariables) {
                    for (const token of environmentVariables) {
                        // Remove the leading "${" and the trailing "}" from the token
                        const environmentVariableName = token.substring(2, token.length - 1);
                        if (!process.env[environmentVariableName]) {
                            lineShouldBeTrimmed = true;
                            break;
                        }
                    }
                }
                if (lineShouldBeTrimmed) {
                    // Example output:
                    // "; MISSING ENVIRONMENT VARIABLE: //my-registry.com/npm/:_authToken=${MY_AUTH_TOKEN}"
                    resultLines.push('; MISSING ENVIRONMENT VARIABLE: ' + line);
                }
                else {
                    resultLines.push(line);
                }
            }
            fs.writeFileSync(targetNpmrcPath, resultLines.join(os.EOL));
        }
        else if (fs.existsSync(targetNpmrcPath)) {
            // If the source .npmrc doesn't exist and there is one in the target, delete the one in the target
            fs.unlinkSync(targetNpmrcPath);
        }
    }
    catch (e) {
        throw new Error(`Error syncing .npmrc file: ${e}`);
    }
}
let _npmPath = undefined;
/**
 * Get the absolute path to the npm executable
 */
function getNpmPath() {
    if (!_npmPath) {
        try {
            if (os.platform() === 'win32') {
                // We're on Windows
                const whereOutput = childProcess.execSync('where npm', { stdio: [] }).toString();
                const lines = whereOutput.split(os.EOL).filter((line) => !!line);
                // take the last result, we are looking for a .cmd command
                // see https://github.com/microsoft/rushstack/issues/759
                _npmPath = lines[lines.length - 1];
            }
            else {
                // We aren't on Windows - assume we're on *NIX or Darwin
                _npmPath = childProcess.execSync('which npm', { stdio: [] }).toString();
            }
        }
        catch (e) {
            throw new Error(`Unable to determine the path to the NPM tool: ${e}`);
        }
        _npmPath = _npmPath.trim();
        if (!fs.existsSync(_npmPath)) {
            throw new Error('The NPM executable does not exist');
        }
    }
    return _npmPath;
}
exports.getNpmPath = getNpmPath;
function _ensureFolder(folderPath) {
    if (!fs.existsSync(folderPath)) {
        const parentDir = path.dirname(folderPath);
        _ensureFolder(parentDir);
        fs.mkdirSync(folderPath);
    }
}
/**
 * Create missing directories under the specified base directory, and return the resolved directory.
 *
 * Does not support "." or ".." path segments.
 * Assumes the baseFolder exists.
 */
function _ensureAndJoinPath(baseFolder, ...pathSegments) {
    let joinedPath = baseFolder;
    try {
        for (let pathSegment of pathSegments) {
            pathSegment = pathSegment.replace(/[\\\/]/g, '+');
            joinedPath = path.join(joinedPath, pathSegment);
            if (!fs.existsSync(joinedPath)) {
                fs.mkdirSync(joinedPath);
            }
        }
    }
    catch (e) {
        throw new Error(`Error building local installation folder (${path.join(baseFolder, ...pathSegments)}): ${e}`);
    }
    return joinedPath;
}
function _getRushTempFolder(rushCommonFolder) {
    const rushTempFolder = process.env[RUSH_TEMP_FOLDER_ENV_VARIABLE_NAME];
    if (rushTempFolder !== undefined) {
        _ensureFolder(rushTempFolder);
        return rushTempFolder;
    }
    else {
        return _ensureAndJoinPath(rushCommonFolder, 'temp');
    }
}
/**
 * Resolve a package specifier to a static version
 */
function _resolvePackageVersion(rushCommonFolder, { name, version }) {
    if (!version) {
        version = '*'; // If no version is specified, use the latest version
    }
    if (version.match(/^[a-zA-Z0-9\-\+\.]+$/)) {
        // If the version contains only characters that we recognize to be used in static version specifiers,
        // pass the version through
        return version;
    }
    else {
        // The version is a range or a tag, so it needs to be resolved to a concrete version via "npm view"
        try {
            const rushTempFolder = _getRushTempFolder(rushCommonFolder);
            const sourceNpmrcFolder = path.join(rushCommonFolder, 'config', 'rush');
            _syncNpmrc(sourceNpmrcFolder, rushTempFolder);
            const npmPath = getNpmPath();
            // This returns something that looks like:
            // @microsoft/rush@3.0.0 '3.0.0'
            // @microsoft/rush@3.0.1 '3.0.1'
            // ...
            // @microsoft/rush@3.0.20 '3.0.20'
            // <blank line>
            const npmVersionSpawnResult = childProcess.spawnSync(npmPath, ['view', `${name}@${version}`, 'version', '--no-update-notifier'], {
                cwd: rushTempFolder,
                stdio: []
            });
            if (npmVersionSpawnResult.status !== 0) {
                throw new Error(`"npm view" returned error code ${npmVersionSpawnResult.status}`);
            }
            const npmViewVersionOutput = npmVersionSpawnResult.stdout.toString();
            const versionLines = npmViewVersionOutput.split('\n').filter((line) => !!line);
            const latestVersion = versionLines[versionLines.length - 1];
            if (!latestVersion) {
                throw new Error('No versions found for the specified version range.');
            }
            const versionMatches = latestVersion.match(/^.+\s\'(.+)\'$/);
            if (!versionMatches) {
                throw new Error(`Invalid npm output ${latestVersion}`);
            }
            return versionMatches[1];
        }
        catch (e) {
            throw new Error(`Unable to resolve version ${version} of package ${name}: ${e}`);
        }
    }
}
let _rushJsonFolder;
/**
 * Find the absolute path to the folder containing rush.json
 */
function findRushJsonFolder() {
    if (!_rushJsonFolder) {
        let basePath = __dirname;
        let tempPath = __dirname;
        do {
            const testRushJsonPath = path.join(basePath, exports.RUSH_JSON_FILENAME);
            if (fs.existsSync(testRushJsonPath)) {
                _rushJsonFolder = basePath;
                break;
            }
            else {
                basePath = tempPath;
            }
        } while (basePath !== (tempPath = path.dirname(basePath))); // Exit the loop when we hit the disk root
        if (!_rushJsonFolder) {
            throw new Error('Unable to find rush.json.');
        }
    }
    return _rushJsonFolder;
}
exports.findRushJsonFolder = findRushJsonFolder;
/**
 * Detects if the package in the specified directory is installed
 */
function _isPackageAlreadyInstalled(packageInstallFolder) {
    try {
        const flagFilePath = path.join(packageInstallFolder, INSTALLED_FLAG_FILENAME);
        if (!fs.existsSync(flagFilePath)) {
            return false;
        }
        const fileContents = fs.readFileSync(flagFilePath).toString();
        return fileContents.trim() === process.version;
    }
    catch (e) {
        return false;
    }
}
/**
 * Removes the following files and directories under the specified folder path:
 * - installed.flag
 * - package-lock.json
 * - node_modules
 */
function _cleanInstallFolder(rushTempFolder, packageInstallFolder) {
    try {
        const flagFile = path.resolve(packageInstallFolder, INSTALLED_FLAG_FILENAME);
        if (fs.existsSync(flagFile)) {
            fs.unlinkSync(flagFile);
        }
        const packageLockFile = path.resolve(packageInstallFolder, 'package-lock.json');
        if (fs.existsSync(packageLockFile)) {
            fs.unlinkSync(packageLockFile);
        }
        const nodeModulesFolder = path.resolve(packageInstallFolder, NODE_MODULES_FOLDER_NAME);
        if (fs.existsSync(nodeModulesFolder)) {
            const rushRecyclerFolder = _ensureAndJoinPath(rushTempFolder, 'rush-recycler', `install-run-${Date.now().toString()}`);
            fs.renameSync(nodeModulesFolder, rushRecyclerFolder);
        }
    }
    catch (e) {
        throw new Error(`Error cleaning the package install folder (${packageInstallFolder}): ${e}`);
    }
}
function _createPackageJson(packageInstallFolder, name, version) {
    try {
        const packageJsonContents = {
            'name': 'ci-rush',
            'version': '0.0.0',
            'dependencies': {
                [name]: version
            },
            'description': 'DON\'T WARN',
            'repository': 'DON\'T WARN',
            'license': 'MIT'
        };
        const packageJsonPath = path.join(packageInstallFolder, PACKAGE_JSON_FILENAME);
        fs.writeFileSync(packageJsonPath, JSON.stringify(packageJsonContents, undefined, 2));
    }
    catch (e) {
        throw new Error(`Unable to create package.json: ${e}`);
    }
}
/**
 * Run "npm install" in the package install folder.
 */
function _installPackage(packageInstallFolder, name, version) {
    try {
        console.log(`Installing ${name}...`);
        const npmPath = getNpmPath();
        const result = childProcess.spawnSync(npmPath, ['install'], {
            stdio: 'inherit',
            cwd: packageInstallFolder,
            env: process.env
        });
        if (result.status !== 0) {
            throw new Error('"npm install" encountered an error');
        }
        console.log(`Successfully installed ${name}@${version}`);
    }
    catch (e) {
        throw new Error(`Unable to install package: ${e}`);
    }
}
/**
 * Get the ".bin" path for the package.
 */
function _getBinPath(packageInstallFolder, binName) {
    const binFolderPath = path.resolve(packageInstallFolder, NODE_MODULES_FOLDER_NAME, '.bin');
    const resolvedBinName = (os.platform() === 'win32') ? `${binName}.cmd` : binName;
    return path.resolve(binFolderPath, resolvedBinName);
}
/**
 * Write a flag file to the package's install directory, signifying that the install was successful.
 */
function _writeFlagFile(packageInstallFolder) {
    try {
        const flagFilePath = path.join(packageInstallFolder, INSTALLED_FLAG_FILENAME);
        fs.writeFileSync(flagFilePath, process.version);
    }
    catch (e) {
        throw new Error(`Unable to create installed.flag file in ${packageInstallFolder}`);
    }
}
function installAndRun(packageName, packageVersion, packageBinName, packageBinArgs) {
    const rushJsonFolder = findRushJsonFolder();
    const rushCommonFolder = path.join(rushJsonFolder, 'common');
    const rushTempFolder = _getRushTempFolder(rushCommonFolder);
    const packageInstallFolder = _ensureAndJoinPath(rushTempFolder, 'install-run', `${packageName}@${packageVersion}`);
    if (!_isPackageAlreadyInstalled(packageInstallFolder)) {
        // The package isn't already installed
        _cleanInstallFolder(rushTempFolder, packageInstallFolder);
        const sourceNpmrcFolder = path.join(rushCommonFolder, 'config', 'rush');
        _syncNpmrc(sourceNpmrcFolder, packageInstallFolder);
        _createPackageJson(packageInstallFolder, packageName, packageVersion);
        _installPackage(packageInstallFolder, packageName, packageVersion);
        _writeFlagFile(packageInstallFolder);
    }
    const statusMessage = `Invoking "${packageBinName} ${packageBinArgs.join(' ')}"`;
    const statusMessageLine = new Array(statusMessage.length + 1).join('-');
    console.log(os.EOL + statusMessage + os.EOL + statusMessageLine + os.EOL);
    const binPath = _getBinPath(packageInstallFolder, packageBinName);
    const result = childProcess.spawnSync(binPath, packageBinArgs, {
        stdio: 'inherit',
        cwd: process.cwd(),
        env: process.env
    });
    if (result.status !== null) {
        return result.status;
    }
    else {
        throw result.error || new Error('An unknown error occurred.');
    }
}
exports.installAndRun = installAndRun;
function runWithErrorAndStatusCode(fn) {
    process.exitCode = 1;
    try {
        const exitCode = fn();
        process.exitCode = exitCode;
    }
    catch (e) {
        console.error(os.EOL + os.EOL + e.toString() + os.EOL + os.EOL);
    }
}
exports.runWithErrorAndStatusCode = runWithErrorAndStatusCode;
function _run() {
    const [nodePath, /* Ex: /bin/node */ scriptPath, /* /repo/common/scripts/install-run-rush.js */ rawPackageSpecifier, /* qrcode@^1.2.0 */ packageBinName, /* qrcode */ ...packageBinArgs /* [-f, myproject/lib] */] = process.argv;
    if (!nodePath) {
        throw new Error('Unexpected exception: could not detect node path');
    }
    if (path.basename(scriptPath).toLowerCase() !== 'install-run.js') {
        // If install-run.js wasn't directly invoked, don't execute the rest of this function. Return control
        // to the script that (presumably) imported this file
        return;
    }
    if (process.argv.length < 4) {
        console.log('Usage: install-run.js <package>@<version> <command> [args...]');
        console.log('Example: install-run.js qrcode@1.2.2 qrcode https://rushjs.io');
        process.exit(1);
    }
    runWithErrorAndStatusCode(() => {
        const rushJsonFolder = findRushJsonFolder();
        const rushCommonFolder = _ensureAndJoinPath(rushJsonFolder, 'common');
        const packageSpecifier = _parsePackageSpecifier(rawPackageSpecifier);
        const name = packageSpecifier.name;
        const version = _resolvePackageVersion(rushCommonFolder, packageSpecifier);
        if (packageSpecifier.version !== version) {
            console.log(`Resolved to ${name}@${version}`);
        }
        return installAndRun(name, version, packageBinName, packageBinArgs);
    });
}
_run();
//# sourceMappingURL=install-run.js.map
|
|
@ -0,0 +1,161 @@
|
|||
// @ts-check
// Ensure using node 12 because of recursive mkdir
if (
  !process.env.GEN_CLDR_DATA_IGNORE_NODE_VERSION &&
  parseInt(process.version.replace('v', ''), 10) < 12
) {
  console.error(`
Your node version appears to be below v12: ${process.version}.
This script will not run correctly on earlier versions of node.
Set 'GEN_CLDR_DATA_IGNORE_NODE_VERSION' environment variable to truthy to override`);
}

const fs = require('fs');
const path = require('path');
const cp = require('child_process');
const os = require('os');

// from: https://semver.org/
const semverRegex = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?$/
const npm = (process.platform === 'win32' ? 'npm.cmd' : 'npm');
const packDirName = "./.output";
const packDir = path.join(__dirname, "../..", packDirName);

async function main() {
  const plog = prettyLogger('version-and-pack', 'main');

  // Extract version from command line
  const version = extractVersion(process.argv);
  if (version === undefined) {
throw new TypeError("You must specifiy --version as an argument");
|
  }
  if (!semverRegex.test(version)) {
    throw new RangeError(`Version must match semver V1 format (i.e. X.X.X-label). Received: "${version}"`);
  }
  plog(`Setting version: ${version}`);

  // Load all modules we care about from rush
  let projects;
  try {
    const rushConfig = require('../../rush.json');
    projects = rushConfig.projects;
    plog("Loading projects:");
    plog(JSON.stringify(projects, null, ' '));
  } catch (e) {
    plog("Could not load projects from rush.json");
    throw e;
  }

  try {
    plog("Creating output directory: " + packDir);
    createIfNotExistSync(packDir);
  } catch (e) {
    plog("Could not create output directory");
    throw e;
  }
  for (const project of projects) {
    plog("Bumping version for " + project.packageName + " to " + version);
    const pathToPackage = path.join(__dirname, "../../", project.projectFolder);
    await exec(npm, ['version', version, '--allow-same-version'], { cwd: pathToPackage });

    plog("Updating dependencies in package.json");
    const packageJsonPath = path.join(pathToPackage, 'package.json');
    const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));

    for (const dep of projects) {
      if (packageJson.dependencies[dep.packageName]) {
        plog("Updating version in " + project.packageName + ": " + dep.packageName + " -> " + version);
        packageJson.dependencies[dep.packageName] = version;
      }
    }

    fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, ' ') + '\n', {
      encoding: 'utf8'
    });

    plog("Packing " + project.packageName);
    const output = await exec(npm, ['pack'], { cwd: pathToPackage });
    const tgz = parseTgz(output);
    plog('found tgz: ' + tgz);
    fs.copyFileSync(path.join(pathToPackage, tgz), path.join(packDir, tgz));
  }

  plog('Complete');
}

function extractVersion(argv) {
  for (let i = 1; i < argv.length; i++) {
    if (argv[i - 1] === '--version') {
      return argv[i].trim();
    }
  }
}

function parseTgz(text) {
  const lines = text.split('\n');
  for (const line of lines) {
    if (line.endsWith('.tgz')) {
      return line;
    }
  }
}

async function exec(command, args, opts) {
  const stdout = prettyLogger(command, 'stdout');
  const stderr = prettyLogger(command, 'stderr');
  const error = prettyLogger(command, 'error');

  return new Promise((resolve, reject) => {
    const p = cp.spawn(command, args, opts);
    let buffer = '';

    p.stdout.on('data', data => {
      buffer += data;
      stdout(`[${command}][stdout]: ${data}`);
    });

    p.stderr.on('data', data => {
      stderr(`[${command}][stderr]: ${data}`);
    });

    p.on('error', err => {
      error(err);
    });

    p.on('close', code => {
      if (code !== 0) {
        return reject(new Error(`"${command} ${args.join(' ')}" returned unsuccessful error code: ${code}`));
      } else {
        resolve(buffer);
      }
    });
  });
}

function createIfNotExistSync(path) {
  try {
    fs.mkdirSync(path, { recursive: true });
  } catch (e) {
    if (e.code !== 'EEXIST') {
      throw e;
    }
  }
}

function prettyLogger(...labels) {
  const header = `[${labels.join('][')}]: `;
  return (content) => {
    if (typeof content !== 'string') {
      content = JSON.stringify(content, null, ' ');
    }
    const lines = content.split('\n');
    lines.forEach((v) => console.log(header + v));
  };
}

main().catch(err => {
  console.error(err);
  process.exit(1);
}).then(() => {
  process.exit(0);
});
|
|
@ -0,0 +1,11 @@
|
|||
root = true

[*]
indent_style = space
indent_size = 2
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.md]
trim_trailing_whitespace = false
|
|
@ -0,0 +1,12 @@
|
|||
*-debug.log
*-error.log
/.nyc_output
/dist
/lib
/tmp
/yarn.lock
node_modules
**/.antlr
**/java_generated

results
|
|
@ -0,0 +1,24 @@
|
|||
{
  "extension": [
    ".ts",
    ".js"
  ],
  "include": [
    "src"
  ],
  "exclude": [
    "**/node_modules/**",
    "**/tests/**",
    "**/coverage/**",
    "**/*.d.ts"
  ],
  "reporter": [
    "html",
    "lcov",
    "text"
  ],
  "all": true,
  "cache": true,
  "extends": "@istanbuljs/nyc-config-typescript",
  "check-coverage": false
}
|
|
@ -0,0 +1,96 @@
|
|||

This package is intended for Microsoft use only. It is not designed to be consumed as an independent package.

# Consuming @microsoft/bf-lu as a library
@microsoft/bf-lu can be used within a Node.js application as an imported library. Install locally:

```bash
npm install @microsoft/bf-lu --save
```

# V2 API

## Parsing LU Content
To parse LU files, you can use the LUISBuilder class, which returns a LUIS object:

```js
const Luis = require('@microsoft/bf-lu').V2.Luis
const LUISBuilder = require('@microsoft/bf-lu').V2.LuisBuilder
const luContent = `# Greeting
- hi`;

const luisObject = await LUISBuilder.fromContentAsync(luContent)

// Parsed LUIS object
console.log(JSON.stringify(luisObject, null, 2));

```
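Note: the snippets in this README use `await` at the top level, so they assume an enclosing `async` function (or an environment with top-level `await` support).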

## Validating parsed LU content

You can use the available validate() function to verify whether the parsed LUIS object is valid. This helps catch name conflicts, invalid labelled utterances, etc.

```js
const LUISBuilder = require('@microsoft/bf-lu').V2.LuisBuilder
const exception = require('@microsoft/bf-lu').V2.Exception
const luContent = `# Greeting
- hi`;

const luisObject = await LUISBuilder.fromLUAsync(luContent)
luisObject.intents[0].name = "testIntent123456789012345678901234567890123"
luisObject.validate()
```
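Note that validate() throws an instance of the exported Exception class when the parsed object is invalid rather than returning a result, so application code will usually wrap the call in try/catch, as the next example shows.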

## Generating lu content from LUIS JSON

You can generate lu content from a LUIS instance using the parseToLuContent() method. Here's an example code snippet.

```js
const LUISBuilder = require('@microsoft/bf-lu').V2.LuisBuilder
const exception = require('@microsoft/bf-lu').V2.Exception
const luContent = `# Greeting
- hi
$userName:first=
-vishwac`;
const log = false;
const locale = 'en-us';
async function parseContent() {

  try {
    const luisObject = await LUISBuilder.fromContentAsync(luContent)
    luisObject.validate()
    const parsedLuisBackToLu = luisObject.parseToLuContent()
  } catch (error) {
    if (error instanceof exception) {
      // do something specific to this exception
    } else {
      console.log(error);
    }
  }
}

parseContent();

```

## Translating lu files

You can take advantage of the [Microsoft text translation API](https://docs.microsoft.com/en-us/azure/cognitive-services/translator/) to automatically machine translate .lu files to one or more of the [60+ languages](https://aka.ms/translate-langs) supported by the Microsoft text translation cognitive service.

To translate lu file content, you can simply use the translate() method in the LU class. Here's a code snippet.

```js
const LU = require('@microsoft/bf-lu').V2.LU
const luContent = `# Greeting
- hi
$userName:first=
-vishwac`;
const targetLanguage = 'de';
const subscriptionKey = '<YOUR TEXT TRANSLATION KEY>';
const translateComments = true;
const translateLinkText = true;

const luInstance = new LU(luContent)
await luInstance.translate(subscriptionKey, targetLanguage, translateComments, translateLinkText)
const translatedCode = luInstance.content

```
|
|
@ -0,0 +1,69 @@
|
|||
{
  "name": "@microsoft/bf-lu-parser",
  "version": "1.0.0",
  "author": "Microsoft",
  "bugs": "https://github.com/microsoft/botframework-cli/issues",
  "main": "lib/parser/index.js",
  "browser": "lib/parser/composerindex.js",
  "engines": {
    "node": ">=8.0.0"
  },
  "files": [
    "/lib",
    "/npm-shrinkwrap.json"
  ],
  "homepage": "https://github.com/microsoft/botframework-cli",
  "license": "MIT",
  "repository": "https://github.com/microsoft/botframework-cli/tree/master/packages/lu",
  "scripts": {
    "postpack": "",
    "posttest": "tslint -p test -t stylish",
    "build": "tsc -b",
    "clean": "rimraf ./.nyc_output ./lib ./package-lock.json ./tsconfig.tsbuildinfo",
    "test": "mocha",
    "coverage": "nyc npm run test",
    "doc": "",
    "doc:readme": "",
    "version": "npm run doc:readme && git add README.md"
  },
  "nyc": {
    "exclude": [
      "**/lufile/generated/**",
      "test/**"
    ]
  },
  "dependencies": {
    "@azure/cognitiveservices-luis-authoring": "4.0.0-preview.1",
    "@azure/ms-rest-azure-js": "2.0.1",
    "@types/node-fetch": "~2.5.5",
    "antlr4": "^4.7.2",
    "chalk": "2.4.1",
    "console-stream": "^0.1.1",
    "deep-equal": "^1.0.1",
    "delay": "^4.3.0",
    "fs-extra": "^8.1.0",
    "get-stdin": "^6.0.0",
    "globby": "^10.0.1",
    "intercept-stdout": "^0.1.2",
    "lodash": "^4.17.19",
    "node-fetch": "~2.6.0",
    "semver": "^5.5.1",
    "tslib": "^1.10.0"
  },
  "devDependencies": {
    "@types/chai": "^4.2.0",
    "@types/lodash": "~4.14.159",
    "@types/mocha": "^5.2.7",
    "@types/nock": "^11.1.0",
    "@types/node": "^10.14.15",
    "chai": "^4.2.0",
    "mocha": "^6.2.2",
    "nock": "^11.7.0",
    "nyc": "^14.1.1",
    "rimraf": "^2.6.3",
    "ts-node": "^8.3.0",
    "tslint": "^5.18.0",
    "typescript": "^3.5.3",
    "uuid": "^3.3.3"
  }
}
|
|
@ -0,0 +1,6 @@
|
|||
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

export default {}
|
|
@ -0,0 +1,11 @@
|
|||
module.exports = {
  parser: {
    parseFile: require('./lufile/parseFileContents').parseFile,
    validateLUISBlob: require('./luis/luisValidator')
  },
  sectionHandler: {
    luParser: require('./lufile/luParser'),
    sectionOperator: require('./lufile/sectionOperator'),
    luSectionTypes: require('./utils/enums/lusectiontypes')
  },
}
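// Example usage (illustrative sketch only; the argument list and the return shape of
// parseFile are assumptions, not confirmed by this file):
//   const { parser } = require('./composerindex')
//   const parsed = await parser.parseFile('# Greeting\n- hi', false, 'en-us')
//   parser.validateLUISBlob(parsed.LUISJsonStructure)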
|
|
@ -0,0 +1,96 @@
|
|||
const fs = require("fs");
|
||||
class Writer {
|
||||
constructor() {
|
||||
this.indentSize = 4;
|
||||
this.indentLevel = 0;
|
||||
this.outputStream = undefined;
|
||||
}
|
||||
async setOutputStream(outputPath) {
|
||||
const ConsoleStream = require('console-stream');
|
||||
const stream = outputPath ? fs.createWriteStream(outputPath) : ConsoleStream();
|
||||
const streamPromise = new Promise((resolve) => {
|
||||
if (stream instanceof fs.WriteStream) {
|
||||
stream.once('ready', (_fd) => {
|
||||
this.outputStream = stream;
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
else {
|
||||
this.outputStream = stream;
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
const timeoutPromise = new Promise((resolve) => {
|
||||
setTimeout(resolve, 2000);
|
||||
this.outputStream = stream;
|
||||
});
|
||||
return Promise.race([streamPromise, timeoutPromise]).then(() => {
|
||||
});
|
||||
}
|
||||
increaseIndentation() {
|
||||
this.indentLevel += this.indentSize;
|
||||
}
|
||||
decreaseIndentation() {
|
||||
this.indentLevel -= this.indentSize;
|
||||
}
|
||||
write(str) {
|
||||
this.outputStream.write(str);
|
||||
}
|
||||
writeLine(str = '') {
|
||||
if (typeof str === 'string') {
|
||||
this.write(str + '\n');
|
||||
}
|
||||
else {
|
||||
str.forEach(line => {
|
||||
this.write(line + '\n');
|
||||
});
|
||||
}
|
||||
}
|
||||
writeIndented(str) {
|
||||
let writeFunction = (text) => {
|
||||
for (let index = 0; index < this.indentLevel; index++) {
|
||||
this.write(' ');
|
||||
}
|
||||
this.write(text);
|
||||
};
|
||||
writeFunction.bind(this);
|
||||
if (typeof str === 'string') {
|
||||
writeFunction(str);
|
||||
}
|
||||
else {
|
||||
str.forEach(line => {
|
||||
writeFunction(line);
|
||||
});
|
||||
}
|
||||
}
|
||||
writeLineIndented(lines) {
|
||||
if (typeof lines === 'string') {
|
||||
this.writeIndented(lines + '\n');
|
||||
}
|
||||
else {
|
||||
lines.forEach(line => {
|
||||
this.writeIndented(line + '\n');
|
||||
});
|
||||
}
|
||||
}
|
||||
async closeOutputStream() {
|
||||
this.outputStream.end();
|
||||
const streamPromise = new Promise((resolve) => {
|
||||
if (this.outputStream instanceof fs.WriteStream) {
|
||||
this.outputStream.on('finish', (_fd) => {
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
const timeoutPromise = new Promise((resolve) => {
|
||||
setTimeout(resolve, 1000);
|
||||
});
|
||||
return Promise.race([streamPromise, timeoutPromise]).then(() => {
|
||||
this.outputStream = undefined;
|
||||
});
|
||||
}
|
||||
}
|
||||
module.exports = Writer;
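// Example usage (illustrative sketch, not part of the module):
//   const Writer = require('./helpers/writer');
//   const writer = new Writer();
//   await writer.setOutputStream('generated.cs');  // omit the path to write to the console
//   writer.writeLineIndented(['class Foo', '{']);
//   writer.increaseIndentation();
//   writer.writeLineIndented('public string Bar;');
//   writer.decreaseIndentation();
//   writer.writeLineIndented('}');
//   await writer.closeOutputStream();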
|
|
@ -0,0 +1,313 @@
|
|||
const parse_multi_platform_luis_1 = require("./../luis/propertyHelper");
|
||||
const LuisGenBuilder = require('./../luis/luisGenBuilder')
|
||||
const exception = require('./../utils/exception');
|
||||
const Writer = require("./helpers/writer");
|
||||
const lodash = require("lodash")
|
||||
|
||||
module.exports = {
|
||||
writeFromLuisJson: async function(luisJson, className, space, outPath) {
|
||||
const app = LuisGenBuilder.build(luisJson);
|
||||
let writer = new Writer();
|
||||
await writer.setOutputStream(outPath);
|
||||
this.header(space, className, writer);
|
||||
writer.writeLine();
|
||||
this.intents(app.intents, writer);
|
||||
this.entities(app, writer);
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'[JsonExtensionData(ReadData = true, WriteData = true)]',
|
||||
'public IDictionary<string, object> Properties {get; set; }'
|
||||
]);
|
||||
this.converter(className, writer);
|
||||
this.onError(writer);
|
||||
this.topScoringIntent(writer);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}'); // Class
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}'); // Namespace
|
||||
await writer.closeOutputStream();
|
||||
},
|
||||
header: function(space, className, writer) {
|
||||
writer.writeLine([
|
||||
'// <auto-generated>',
|
||||
'// Code generated by luis:generate:cs',
|
||||
'// Tool github: https://github.com/microsoft/botframework-cli',
|
||||
'// Changes may cause incorrect behavior and will be lost if the code is',
|
||||
'// regenerated.',
|
||||
'// </auto-generated>',
|
||||
'using Newtonsoft.Json;',
|
||||
'using Newtonsoft.Json.Serialization;',
|
||||
'using System;',
|
||||
'using System.Collections.Generic;',
|
||||
'using Microsoft.Bot.Builder;',
|
||||
'using Microsoft.Bot.Builder.AI.Luis;',
|
||||
`namespace ${space}`,
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
//Main class
|
||||
writer.writeLineIndented([
|
||||
`public partial class ${className}: IRecognizerConvert`,
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
//Text
|
||||
writer.writeLineIndented([
|
||||
'[JsonProperty("text")]',
|
||||
'public string Text;'
|
||||
]);
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'[JsonProperty("alteredText")]',
|
||||
'public string AlteredText;'
|
||||
]);
|
||||
},
|
||||
intents: function(intents, writer) {
|
||||
writer.writeLineIndented('public enum Intent {');
|
||||
writer.increaseIndentation();
|
||||
const lastIntent = intents.pop();
|
||||
intents.forEach((intent) => {
|
||||
writer.writeLineIndented(`${intent},`);
|
||||
});
|
||||
if (lastIntent) {
|
||||
writer.writeLineIndented(lastIntent);
|
||||
}
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'};',
|
||||
'[JsonProperty("intents")]',
|
||||
'public Dictionary<Intent, IntentScore> Intents;'
|
||||
]);
|
||||
},
|
||||
entities: function(app, writer) {
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'public class _Entities',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
this.writeEntityBlock(app.entities, 'Simple entities', (entity) => {
|
||||
writer.writeLineIndented(this.getEntityWithType(entity));
|
||||
}, writer);
|
||||
this.writeEntityBlock(app.prebuiltEntities, 'Built-in entities', (entities) => {
|
||||
const entityType = entities[0];
|
||||
entities.forEach(entity => {
|
||||
writer.writeLineIndented(this.getEntityWithType(entity, entityType));
|
||||
});
|
||||
}, writer);
|
||||
this.writeEntityBlock(app.closedLists, 'Lists', (entity) => {
|
||||
writer.writeLineIndented(this.getEntityWithType(entity, 'list'));
|
||||
}, writer);
|
||||
this.writeEntityBlock(app.regex_entities, 'Regex entities', (entity) => {
|
||||
writer.writeLineIndented(this.getEntityWithType(entity));
|
||||
}, writer);
|
||||
this.writeEntityBlock(app.patternAnyEntities, 'Pattern.any', (entity) => {
|
||||
writer.writeLineIndented(this.getEntityWithType(entity));
|
||||
}, writer);
|
||||
// Composites
|
||||
if (app.composites.length > 0) {
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented('// Composites');
|
||||
let first = true;
|
||||
app.composites.forEach(composite => {
|
||||
if (first) {
|
||||
first = false;
|
||||
}
|
||||
else {
|
||||
writer.writeLine();
|
||||
}
|
||||
writer.writeLineIndented([
|
||||
`public class _Instance${lodash.upperFirst(composite.compositeName)}`,
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
composite.attributes.forEach(attr => {
|
||||
writer.writeLineIndented([
|
||||
`public InstanceData[] ${parse_multi_platform_luis_1.jsonPropertyName(attr)};`
|
||||
]);
|
||||
});
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'}',
|
||||
`public class ${lodash.upperFirst(composite.compositeName)}Class`,
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
composite.attributes.forEach(attr => {
|
||||
writer.writeLineIndented(this.getEntityWithType(attr, app.closedLists.includes(attr) ? 'list' : attr));
|
||||
|
||||
});
|
||||
writer.writeLineIndented([
|
||||
'[JsonProperty("$instance")]',
|
||||
`public _Instance${lodash.upperFirst(composite.compositeName)} _instance;`
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'}',
|
||||
`public ${lodash.upperFirst(composite.compositeName)}Class[] ${composite.compositeName};`
|
||||
]);
|
||||
});
|
||||
}
|
||||
// Instance
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'// Instance',
|
||||
'public class _Instance',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
app.getInstancesList().forEach(instanceData => {
|
||||
writer.writeLineIndented(`public InstanceData[] ${parse_multi_platform_luis_1.jsonPropertyName(instanceData)};`);
|
||||
});
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'}',
|
||||
'[JsonProperty("$instance")]',
|
||||
'public _Instance _instance;'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'}',
|
||||
'[JsonProperty("entities")]',
|
||||
'public _Entities Entities;'
|
||||
]);
|
||||
},
|
||||
getEntityWithType: function(entityNameOrObject, entityType = '') {
|
||||
if (typeof entityNameOrObject === 'object' && 'name' in entityNameOrObject){
|
||||
if ('instanceOf' in entityNameOrObject){
|
||||
entityType = entityNameOrObject.instanceOf
|
||||
entityNameOrObject = entityNameOrObject.name
|
||||
} else if (entityNameOrObject.compositeInstanceOf) {
|
||||
let name = parse_multi_platform_luis_1.jsonPropertyName(entityNameOrObject.name)
|
||||
return `public ${lodash.upperFirst(name)}Class[] ${name};`
|
||||
} else {
|
||||
throw (new exception("Invalid LuisGen object: cannot parse entity"))
|
||||
}
|
||||
}
|
||||
let result = '';
|
||||
switch (entityType) {
|
||||
case 'age':
|
||||
result = 'public Age[]';
|
||||
break;
|
||||
case 'datetimeV2':
|
||||
result = 'public DateTimeSpec[]';
|
||||
break;
|
||||
case 'dimension':
|
||||
result = 'public Dimension[]';
|
||||
break;
|
||||
case 'geographyV2':
|
||||
result = 'public GeographyV2[]';
|
||||
break;
|
||||
case 'list':
|
||||
result = 'public string[][]';
|
||||
break;
|
||||
case 'money':
|
||||
result = 'public Money[]';
|
||||
break;
|
||||
case 'ordinalV2':
|
||||
result = 'public OrdinalV2[]';
|
||||
break;
|
||||
case 'temperature':
|
||||
result = 'public Temperature[]';
|
||||
break;
|
||||
case 'number':
|
||||
case 'ordinal':
|
||||
case 'percentage':
|
||||
result = 'public double[]';
|
||||
break;
|
||||
default:
|
||||
result = 'public string[]';
|
||||
}
|
||||
return result + ` ${parse_multi_platform_luis_1.jsonPropertyName(entityNameOrObject)};`;
|
||||
},
|
||||
converter: function(className, writer) {
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'public void Convert(dynamic result)',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented(
|
||||
`var app = JsonConvert.DeserializeObject<${className}>(`,
|
||||
);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented(
|
||||
'JsonConvert.SerializeObject('
|
||||
);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'result,',
|
||||
'new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore, Error = OnError }'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented(')');
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
');',
|
||||
'Text = app.Text;',
|
||||
'AlteredText = app.AlteredText;',
|
||||
'Intents = app.Intents;',
|
||||
'Entities = app.Entities;',
|
||||
'Properties = app.Properties;'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}');
|
||||
},
|
||||
onError: function(writer) {
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'private static void OnError(object sender, ErrorEventArgs args)',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'// If needed, put your custom error logic here',
|
||||
'Console.WriteLine(args.ErrorContext.Error.Message);',
|
||||
'args.ErrorContext.Handled = true;'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}');
|
||||
},
|
||||
topScoringIntent: function(writer) {
|
||||
writer.writeLine();
|
||||
writer.writeLineIndented([
|
||||
'public (Intent intent, double score) TopIntent()',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'Intent maxIntent = Intent.None;',
|
||||
'var max = 0.0;',
|
||||
'foreach (var entry in Intents)',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'if (entry.Value.Score > max)',
|
||||
'{'
|
||||
]);
|
||||
writer.increaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'maxIntent = entry.Key;',
|
||||
'max = entry.Value.Score.Value;'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}');
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented([
|
||||
'}',
|
||||
'return (maxIntent, max);'
|
||||
]);
|
||||
writer.decreaseIndentation();
|
||||
writer.writeLineIndented('}');
|
||||
},
|
||||
writeEntityBlock: function(entities, message, logic, writer) {
|
||||
if (entities.length > 0) {
|
||||
if (message !== '') {
|
||||
writer.writeLineIndented(`// ${message}`);
|
||||
}
|
||||
entities.forEach(logic);
|
||||
writer.writeLine();
|
||||
}
|
||||
}
|
||||
}
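// Example usage (illustrative; the require path and file names are hypothetical):
//   const csGenerator = require('./luisToCsConverter')
//   await csGenerator.writeFromLuisJson(luisJson, 'FlightBooking', 'FlightBooking.Recognizers', './FlightBooking.cs')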
|
|
@ -0,0 +1,149 @@
|
|||
const parse_multi_platform_luis_1 = require('./../luis/propertyHelper');
const LuisGenBuilder = require('./../luis/luisGenBuilder')
const Writer = require('./helpers/writer')

module.exports = {
  writeFromLuisJson: async function(luisJson, className, outPath) {
    const app = LuisGenBuilder.build(luisJson);
    let writer = new Writer();
    writer.indentSize = 2;
    await writer.setOutputStream(outPath);
    this.header(writer);
    this.intents(app, writer);
    this.entities(app, writer);
    this.classInterface(className, writer);
    await writer.closeOutputStream();
  },
  header: function(writer) {
    writer.writeLine([
      '/**',
      ' * <auto-generated>',
      ' * Code generated by luis:generate:ts',
      ' * Tool github: https://github.com/microsoft/botframework-cli',
      ' * Changes may cause incorrect behavior and will be lost if the code is',
      ' * regenerated.',
      ' * </auto-generated>',
      ' */',
      "import {DateTimeSpec, GeographyV2, InstanceData, IntentData, NumberWithUnits, OrdinalV2} from 'botbuilder-ai'"
    ]);
  },
  intents: function(app, writer) {
    writer.writeLine();
    writer.writeLineIndented('export interface GeneratedIntents {');
    writer.increaseIndentation();
    app.intents.forEach((intent) => {
      writer.writeLineIndented(`${parse_multi_platform_luis_1.normalizeName(intent)}: IntentData`);
    });
    writer.decreaseIndentation();
    writer.writeLine('}');
  },
  entities: function(app, writer) {
    // Composite instance and data
    app.composites.forEach((composite) => {
      let name = parse_multi_platform_luis_1.normalizeName(composite.compositeName);
      writer.writeLine();
      writer.writeLineIndented(`export interface GeneratedInstance${name} {`);
      writer.increaseIndentation();
      composite.attributes.forEach((attribute) => {
        writer.writeLineIndented(`${parse_multi_platform_luis_1.jsonPropertyName(attribute)}?: InstanceData[]`);
      });
      writer.decreaseIndentation();
      writer.writeLineIndented('}');
      writer.writeLineIndented(`export interface ${name} {`);
      writer.increaseIndentation();
      composite.attributes.forEach(attribute => {
        writer.writeLineIndented(this.getEntityWithType(attribute, this.isList(attribute, app)));
      });
      writer.writeLineIndented(`$instance?: GeneratedInstance${name}`);
      writer.decreaseIndentation();
      writer.writeLineIndented('}');
    });
    writer.writeLine();
    // Entity instance
    writer.writeLineIndented('export interface GeneratedInstance {');
    writer.increaseIndentation();
    app.getInstancesList().forEach(instance => {
      writer.writeLineIndented(`${parse_multi_platform_luis_1.jsonPropertyName(instance)}?: InstanceData[]`);
    });
    writer.decreaseIndentation();
    writer.writeLineIndented('}');
    // Entities
    writer.writeLine();
    writer.writeLineIndented('export interface GeneratedEntities {');
    writer.increaseIndentation();
    this.writeEntityGroup(app.entities, '// Simple entities', writer);
    writer.writeLineIndented('// Built-in entities');
    app.prebuiltEntities.forEach(builtInEntity => {
      builtInEntity.forEach(entity => {
        writer.writeLineIndented(this.getEntityWithType(entity));
      });
    });
    writer.writeLine();
    this.writeEntityGroup(app.closedLists, '// Lists', writer, true);
    this.writeEntityGroup(app.regex_entities, '// Regex entities', writer);
    this.writeEntityGroup(app.patternAnyEntities, '// Pattern.any', writer);
    // Composites
    writer.writeLineIndented('// Composites');
    app.composites.forEach(composite => {
      writer.writeLineIndented(`${composite.compositeName}?: ${composite.compositeName}[]`);
    });
    writer.writeLineIndented('$instance: GeneratedInstance');
    writer.decreaseIndentation();
    writer.writeLineIndented('}');
  },
  classInterface: function(className, writer) {
    writer.writeLine();
    writer.writeLineIndented(`export interface ${className} {`);
    writer.increaseIndentation();
    writer.writeLineIndented([
      'text: string',
      'alteredText?: string',
      'intents: GeneratedIntents',
      'entities: GeneratedEntities',
      '[propName: string]: any'
    ]);
    writer.decreaseIndentation();
    writer.writeLineIndented('}');
  },
  writeEntityGroup: function(entityGroup, description, writer, isListType = false) {
    writer.writeLineIndented(description);
    entityGroup.forEach(entity => {
      writer.writeLineIndented(this.getEntityWithType(entity, isListType));
    });
    writer.writeLine();
  },
  isList: function(entityName, app) {
    return app.closedLists.includes(entityName);
  },
  getEntityWithType: function(entityName, isListType = false) {
    let result = '';
    switch (isListType ? 'list' : entityName) {
      case 'age':
      case 'dimension':
      case 'money':
      case 'temperature':
        result = '?: NumberWithUnits[]';
        break;
      case 'geographyV2':
        result = '?: GeographyV2[]';
        break;
      case 'ordinalV2':
        result = '?: OrdinalV2[]';
        break;
      case 'number':
      case 'ordinal':
      case 'percentage':
        result = '?: number[]';
        break;
      case 'datetimeV2':
        result = '?: DateTimeSpec[]';
        break;
      case 'list':
        result = '?: string[][]';
        break;
      default:
        result = '?: string[]';
    }
    return parse_multi_platform_luis_1.jsonPropertyName(entityName) + result;
  }
}
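// Example usage (illustrative; the require path and file names are hypothetical):
//   const tsGenerator = require('./luisToTsConverter')
//   await tsGenerator.writeFromLuisJson(luisJson, 'FlightBookingApp', './flightBookingApp.ts')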
|
|
@ -0,0 +1,108 @@
|
|||
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const fs = require('fs-extra')
const path = require('path')
const exception = require('../utils/exception')
const retCode = require('../utils/enums/CLI-errors')
const fileHelper = require('../../utils/filehelper')

const dialogExt = '.dialog'
const luExt = '.lu'

module.exports = {
  generateConfig: async function (inputFolder, rootDialogFile) {
    let dialogFiles = []
    await getDialogFiles(inputFolder, dialogFiles)

    let rootDialogObject = JSON.parse(await getInputFromFile(rootDialogFile))
    rootDialogObject.path = rootDialogFile
    rootDialogObject.isRoot = true

    let dialogObjects = []
    for (const dialogFile of dialogFiles) {
      let dialogObject = JSON.parse(await getInputFromFile(dialogFile))
      dialogObject.path = dialogFile
      dialogObjects.push(dialogObject)
    }

    const configObject = createConfig(rootDialogObject, dialogObjects, inputFolder)

    return JSON.stringify(configObject)
  }
}

const getDialogFiles = async function (inputFolder, results) {
  fs.readdirSync(inputFolder).forEach(async dirContent => {
    dirContent = path.resolve(inputFolder, dirContent)
    if (fs.statSync(dirContent).isDirectory()) {
      await getDialogFiles(dirContent, results)
    }

    if (fs.statSync(dirContent).isFile()) {
      if (dirContent.endsWith(dialogExt)) {
        results.push(dirContent)
      }
    }
  })
}

const getInputFromFile = async function (path) {
  if (path) {
    try {
      return await fileHelper.getContentFromFile(path)
    } catch (error) {
      throw (new exception(retCode.errorCode.INVALID_INPUT, `Failed to read file: ${error}`))
    }
  }
  return ''
}

const createConfig = function (rootDialog, dialogs, configPath) {
  let result = {}

  const key = createPath(rootDialog.path, configPath)
  const rootLuPath = rootDialog.path.replace(dialogExt, luExt)

  if (!fs.existsSync(rootLuPath)) {
    throw (new exception(retCode.errorCode.INVALID_INPUT, `Failed to parse mapping rules config from file system: ${rootLuPath} does not exist. Please provide config file by --config`))
  }

  rootDialog.triggers.forEach(trigger => {
    if (trigger.$type && trigger.$type === 'Microsoft.OnIntent') {
      const actions = trigger.actions || []
      for (const action of actions) {
        if (action.$type !== 'Microsoft.BeginDialog') continue

        const dialogName = action.dialog
        const target = dialogs.find(dialog => path.basename(dialog.path, dialogExt) === dialogName)

        if (!target) continue

        const relativePath = createPath(target.path, configPath)
        if (!result[key]) result[key] = { triggers: {} }
        if (!result[key].triggers[trigger.intent]) {
          result[key].triggers[trigger.intent] = relativePath
        } else if (typeof result[key].triggers[trigger.intent] === 'string') {
          result[key].triggers[trigger.intent] = [result[key].triggers[trigger.intent], relativePath]
        } else {
          result[key].triggers[trigger.intent].push(relativePath)
        }

        result = { ...result, ...createConfig(target, dialogs, configPath) }
      }
    }
  })

  if (rootDialog.isRoot && result[key]) result[key].rootDialog = true

  return result
}

const createPath = function (dialogPath, configPath) {
  const luFilePath = dialogPath.replace('.dialog', '.lu')
  const relativePath = path.relative(configPath, luFilePath)
  return relativePath
}
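// For reference, the generated config maps each .lu file (relative to the input folder) to the
// .lu files its intents can trigger, e.g. (illustrative):
//   { "main.lu": { "triggers": { "BookFlight": "bookFlight.lu" }, "rootDialog": true } }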
|
|
@ -0,0 +1,46 @@
|
|||
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const fs = require('fs-extra')
const path = require('path')
const file = require('../../utils/filehelper')
const fileExtEnum = require('../utils/helpers').FileExtTypeEnum
const crossTrainer = require('./crossTrainer')
const confighelper = require('./confighelper')

module.exports = {
  /**
   * Generate cross train config based on input folder and root dialog file.
   * @param {string} inputFolder full path of input lu and qna files folder.
   * @param {string} rootDialogFile full path of root dialog file.
   * @returns {string} config object json string.
   */
  generateConfig: async function (inputFolder, rootDialogFile) {
    const configStr = await confighelper.generateConfig(inputFolder, rootDialogFile)

    return configStr
  },

  /**
   * Cross train lu and qna files.
   * @param {string} input full path of input lu and qna files folder.
   * @param {string} intentName interruption intent name. Default value is _Interruption.
   * @param {string} config path to config of mapping rules, or the mapping rules json content itself. If undefined, config.json is read from the input folder.
   * @param {boolean} verbose flag indicating whether to log warnings and errors when parsing cross-train files.
   * @returns {{luResult: any, qnaResult: any}} trained result of luResult and qnaResult, or undefined if no results.
   */
  train: async function (input, intentName, config, verbose) {
    // Get all related file content.
    const luContents = await file.getFilesContent(input, fileExtEnum.LUFile)
    const qnaContents = await file.getFilesContent(input, fileExtEnum.QnAFile)
    const configContent = config && !fs.existsSync(config) ? {id: path.join(input, 'config.json'), content: config} : await file.getConfigContent(config)

    const configObject = file.getConfigObject(configContent, intentName, verbose)

    const trainedResult = await crossTrainer.crossTrain(luContents, qnaContents, configObject)

    return trainedResult
  }
}
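// Example usage (illustrative sketch; paths and values are hypothetical):
//   const crossTrain = require('./cross-train')
//   const config = await crossTrain.generateConfig('./dialogs', './dialogs/main.dialog')
//   const result = await crossTrain.train('./dialogs', '_Interruption', config, false)
//   // result.luResult and result.qnaResult map file ids to cross-trained resources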
|
|
@ -0,0 +1,574 @@
|
|||
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const helpers = require('../utils/helpers')
const fileExtEnum = require('../utils/helpers').FileExtTypeEnum
const luParser = require('../lufile/luParser')
const SectionOperator = require('../lufile/sectionOperator')
const LUSectionTypes = require('../utils/enums/lusectiontypes')
const LUResource = require('../lufile/luResource')
const DiagnosticSeverity = require('../lufile/diagnostic').DiagnosticSeverity
const fileHelper = require('../../utils/filehelper')
const exception = require('../utils/exception')
const retCode = require('../utils/enums/CLI-errors')
const prebuiltEntityTypes = require('../utils/enums/luisbuiltintypes').consolidatedList
const LuisBuilderVerbose = require('./../luis/luisCollate')
const Luis = require('./../luis/luis')
const qnaBuilderVerbose = require('./../qna/qnamaker/kbCollate')
const NEWLINE = require('os').EOL
const path = require('path')
const QNA_GENERIC_SOURCE = "custom editorial"
const MAX_QUESTIONS_PER_ANSWER = 1000

module.exports = {
  /**
   * Do cross training among lu files
   * @param {any[]} luContents the lu content array whose element includes path and content
   * @param {any[]} qnaContents the qna content array whose element includes path and content
   * @param {any} crossTrainConfig cross train json config
   * @param {any} importResolver import resolver used when resolving import files
   * @returns {{luResult: Map<string, LUResource>, qnaResult: Map<string, LUResource>}} maps of file id to luResource for lu and qna
   * @throws {exception} throws errors
   */
  crossTrain: async function (luContents, qnaContents, crossTrainConfig, importResolver) {
    try {
      let {luObjectArray, qnaObjectArray} = pretreatment(luContents, qnaContents)
      const {rootIds, triggerRules, intentName, verbose} = crossTrainConfig

      // parse lu content to LUResource object
      let {fileIdToResourceMap: luFileIdToResourceMap, allEmpty: allLuEmpty} = await parseAndValidateContent(luObjectArray, verbose, importResolver)

      // parse qna content to LUResource object
      let {fileIdToResourceMap: qnaFileIdToResourceMap, allEmpty: allQnAEmpty} = await parseAndValidateContent(qnaObjectArray, verbose, importResolver)

      if (!allLuEmpty) {
        // construct resource tree to build the father-children relationship among lu files
        let resources = constructResoureTree(luFileIdToResourceMap, triggerRules)

        // do lu cross training from roots. One root one core training
        for (const rootObjectId of rootIds) {
          if (resources.some(r => r.id.toLowerCase() === rootObjectId.toLowerCase())) {
            // do cross training for each root at top level
            const result = luCrossTrain(rootObjectId, resources, qnaFileIdToResourceMap, intentName)
            for (const res of result) {
              luFileIdToResourceMap.set(res.id, res.content)
            }
          } else {
            throw (new exception(retCode.errorCode.INVALID_INPUT, `Sorry, root lu file '${rootObjectId}' does not exist`))
          }
        }
      }

      if (!allQnAEmpty) {
        // do qna cross training with lu files
        qnaCrossTrain(qnaFileIdToResourceMap, luFileIdToResourceMap, intentName, allLuEmpty)
      }

      return { luResult: luFileIdToResourceMap, qnaResult: qnaFileIdToResourceMap }
    } catch (err) {
      throw (err)
    }
  }
}
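// For reference, crossTrainConfig is expected to carry (illustrative values):
//   {
//     rootIds: ['main.lu'],
//     triggerRules: { 'main.lu': { 'BookFlight': 'bookFlight.lu' } },
//     intentName: '_Interruption',
//     verbose: true
//   }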

/**
 * Construct resource tree to build the father-children relationship among lu files
 * @param {Map<string, LUResource>} fileIdToLuResourceMap Map of file id and luResource
 * @param {any} triggerRules trigger rules object that indicates the triggering rules from root to dest lu files
 * @returns {any[]} object array of LUResource with id and children properties
 * @throws {exception} throws errors
 */
const constructResoureTree = function (fileIdToLuResourceMap, triggerRules) {
  let resources = []
  let fileIdsFromInput = Array.from(fileIdToLuResourceMap.keys())
  let lowerCasefileIdsFromInput = Array.from(fileIdToLuResourceMap.keys()).map(x => x.toLowerCase())
  let triggerKeys = Object.keys(triggerRules)
  let lowerCaseTriggerKeys = triggerKeys.map(x => x.toLowerCase())

  for (const fileId of fileIdsFromInput) {
    let luResource = fileIdToLuResourceMap.get(fileId)
    let resource = {
      id: fileId,
      content: luResource,
      children: []
    }

    if (!lowerCaseTriggerKeys.includes(fileId.toLowerCase())) {
      resources.push(resource)
      continue
    }

    let intents = []
    for (const section of luResource.Sections) {
      if (section.SectionType === LUSectionTypes.SIMPLEINTENTSECTION
        || section.SectionType === LUSectionTypes.NESTEDINTENTSECTION) {
        intents.push(section)
      }
    }

    const intentToDestLuFiles = triggerRules[triggerKeys.find(k => k.toLowerCase() === fileId.toLowerCase())]
    for (const triggerIntent of Object.keys(intentToDestLuFiles)) {
      if (triggerIntent !== '' && !intents.some(i => i.Name === triggerIntent)) {
        throw (new exception(retCode.errorCode.INVALID_INPUT, `Sorry, trigger intent '${triggerIntent}' is not found in lu file: ${fileId}`))
      }

      let destLuFiles = intentToDestLuFiles[triggerIntent]
      if (typeof destLuFiles === 'string') destLuFiles = [destLuFiles]

      if (destLuFiles.length > 0) {
        destLuFiles.forEach(destLuFile => {
          if (destLuFile !== '' && !lowerCasefileIdsFromInput.includes(destLuFile.toLowerCase())) {
            throw (new exception(retCode.errorCode.INVALID_INPUT, `Sorry, lu file '${destLuFile}' is not found`))
          } else {
            resource.children.push({
              target: fileIdsFromInput.find(x => x.toLowerCase() === destLuFile.toLowerCase()) || '',
              intent: triggerIntent
            })
          }
        })
      } else {
        resource.children.push({
          target: '',
          intent: triggerIntent
        })
      }
    }

    resources.push(resource)
  }

  return resources
}

/**
 * Lu cross training core function. Do lu cross training from a root to its children once.
 * @param {string} rootResourceId the root resource object id
 * @param {any[]} resources all lu resource object list
 * @param {any[]} qnaFileToResourceMap map of qna file id and resource
 * @param {string} intentName interruption intent name
 * @returns {any[]} updated resource objects
 */
const luCrossTrain = function (rootResourceId, resources, qnaFileToResourceMap, intentName) {
  const idToResourceMap = new Map()
  for (const resource of resources) {
    idToResourceMap.set(resource.id, resource)
  }

  // Parse resources
  let rootResource = resources.filter(r => r.id.toLowerCase() === rootResourceId.toLowerCase())[0]
  rootResource.visited = true
  mergeRootInterruptionToLeaves(rootResource, idToResourceMap, qnaFileToResourceMap, intentName)

  return Array.from(idToResourceMap.values())
}

const mergeRootInterruptionToLeaves = function (rootResource, result, qnaFileToResourceMap, intentName) {
  if (rootResource.children === undefined || rootResource.children.length <= 0) return

  rootResource.content = removeDupUtterances(rootResource.content)

  mergeBrothersInterruption(rootResource, result, intentName)
  for (const child of rootResource.children) {
    let childResource = result.get(child.target)
    if (childResource && childResource.visited === undefined) {
      let rootQnaFileId = rootResource.id.toLowerCase().replace(new RegExp(helpers.FileExtTypeEnum.LUFile + '$'), helpers.FileExtTypeEnum.QnAFile)
      rootQnaFileId = Array.from(qnaFileToResourceMap.keys()).find(x => x.toLowerCase() === rootQnaFileId)
      const rootQnaResource = qnaFileToResourceMap.get(rootQnaFileId)
      const newChildResource = mergeFatherInterruptionToChild(rootResource, rootQnaResource, childResource, intentName)
      result.set(child.target, newChildResource)
      newChildResource.visited = true
      mergeRootInterruptionToLeaves(newChildResource, result, qnaFileToResourceMap, intentName)
    }
  }
}

const mergeBrothersInterruption = function (resource, result, intentName) {
  let children = resource.children
  for (const child of children) {
    const triggerIntent = child.intent
    const destLuFile = child.target
    let intentsWithSameTarget = []
    if (destLuFile !== '') intentsWithSameTarget = children.filter(c => c.target === destLuFile && c.intent !== '').map(x => x.intent)

    const brotherSections = resource.content.Sections.filter(s => s.Name !== triggerIntent
      && s.Name !== intentName
      && (s.SectionType === LUSectionTypes.SIMPLEINTENTSECTION || s.SectionType === LUSectionTypes.NESTEDINTENTSECTION)
      && children.some(brotherChild => brotherChild.intent === s.Name)
      && !intentsWithSameTarget.some(intent => intent === s.Name))

    let brotherUtterances = []
    brotherSections.forEach(s => {
      if (s.SectionType === LUSectionTypes.SIMPLEINTENTSECTION) {
        brotherUtterances = brotherUtterances.concat(s.UtteranceAndEntitiesMap.map(u => u.utterance).filter(i => !patternWithPrebuiltEntity(i)))
      } else {
        s.SimpleIntentSections.forEach(section => {
          brotherUtterances = brotherUtterances.concat(section.UtteranceAndEntitiesMap.map(u => u.utterance).filter(i => !patternWithPrebuiltEntity(i)))
        })
      }
    })

    let targetResource = result.get(child.target)

    // Merge direct brother's utterances
    if (targetResource) {
      targetResource = mergeInterruptionIntent(brotherUtterances, targetResource, intentName)
      result.set(targetResource.id, targetResource)
    }
  }
}

const mergeFatherInterruptionToChild = function (fatherResource, fatherQnaResource, childResource, intentName) {
  let fatherUtterances = []

  // extract father existing interruption utterances
  const fatherInterruptions = fatherResource.content.Sections.filter(s => s.Name === intentName)
  if (fatherInterruptions && fatherInterruptions.length > 0) {
    const fatherInterruption = fatherInterruptions[0]
    fatherUtterances = fatherUtterances.concat(fatherInterruption.UtteranceAndEntitiesMap.map(u => u.utterance))
  }

  // extract corresponding qna questions from father
  let questions = []
  if (fatherQnaResource) {
    const qnaSections = fatherQnaResource.Sections.filter(s => s.SectionType === LUSectionTypes.QNASECTION)
    qnaSections.forEach(q => questions = questions.concat(q.Questions))
  }

  fatherUtterances = fatherUtterances.concat(questions)
  if (fatherUtterances.length > 0) {
    childResource = mergeInterruptionIntent(fatherUtterances, childResource, intentName)
  }

  return childResource
}

const mergeInterruptionIntent = function (fromUtterances, toResource, intentName) {
  // remove duplicated utterances in fromUtterances
  const dedupFromUtterances = Array.from(new Set(fromUtterances))
  let existingUtterances = extractIntentUtterances(toResource.content).map(u => u.toLowerCase())
  const toInterruptions = toResource.content.Sections.filter(section => section.Name === intentName)
  if (toInterruptions && toInterruptions.length > 0) {
    const toInterruption = toInterruptions[0]
    // construct new content here
    let newFileContent = ''
    dedupFromUtterances.forEach(utterance => {
      if (!existingUtterances.includes(utterance.toLowerCase())) {
        newFileContent += '- ' + utterance + NEWLINE
      }
    })

    if (newFileContent === '') return toResource

    const existingContent = `# ${toInterruption.Name}${NEWLINE}${toInterruption.Body}`
    newFileContent = existingContent + NEWLINE + newFileContent
    let lines = newFileContent.split(/\r?\n/)
    let newLines = []
    lines.forEach(line => {
      if (line.trim().startsWith('-')) {
|
||||
newLines.push('- ' + line.trim().slice(1).trim())
|
||||
} else if (line.trim().startsWith('##')) {
|
||||
newLines.push('## ' + line.trim().slice(2).trim())
|
||||
} else if (line.trim().startsWith('#')) {
|
||||
newLines.push('# ' + line.trim().slice(1).trim())
|
||||
}
|
||||
})
|
||||
|
||||
newFileContent = newLines.join(NEWLINE)
|
||||
|
||||
// update section here
|
||||
toResource.content = new SectionOperator(toResource.content).updateSection(toInterruption.Id, newFileContent)
|
||||
} else {
|
||||
// construct new content here
|
||||
const dedupUtterances = dedupFromUtterances.filter(u => !existingUtterances.includes(u.toLowerCase()))
|
||||
if (dedupUtterances && dedupUtterances.length > 0) {
|
||||
let newFileContent = `${NEWLINE}> Source: cross training. Please do not edit these directly!${NEWLINE}# ${intentName}${NEWLINE}- `
|
||||
newFileContent += dedupUtterances.join(`${NEWLINE}- `)
|
||||
|
||||
// add section here
|
||||
// not add the interruption intent if original file is empty
|
||||
if (toResource.content.Content !== '') {
|
||||
toResource.content = new SectionOperator(toResource.content).addSection(newFileContent)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return toResource
|
||||
}
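
// Worked example (hypothetical content): merging ['weather today'] into a
// child .lu resource with no interruption section yet appends, roughly:
//
//   > Source: cross training. Please do not edit these directly!
//   # _Interruption
//   - weather today
//
// A second pass skips the utterance because it already exists; the
// comparison above is case-insensitive.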

const removeDupUtterances = function (resource) {
    let newResource = resource
    resource.Sections.forEach(s => {
        if (s.SectionType === LUSectionTypes.SIMPLEINTENTSECTION) {
            const intentUtterances = s.UtteranceAndEntitiesMap.map(u => u.utterance)
            const dedupedUtterances = Array.from(new Set(intentUtterances))
            if (intentUtterances.length > dedupedUtterances.length) {
                const intentContent = dedupedUtterances.join(NEWLINE + '- ')
                const newSectionContent = `# ${s.Name}${NEWLINE}- ${intentContent}`
                newResource = new SectionOperator(newResource).updateSection(s.Id, newSectionContent)
            }
        }
    })

    return newResource
}

const extractIntentUtterances = function (resource, intentName) {
    const intentSections = resource.Sections.filter(s => s.SectionType === LUSectionTypes.SIMPLEINTENTSECTION || s.SectionType === LUSectionTypes.NESTEDINTENTSECTION)

    let intentUtterances = []
    if (intentName && intentName !== '') {
        const specificSections = intentSections.filter(s => s.Name === intentName)
        if (specificSections.length > 0) {
            intentUtterances = intentUtterances.concat(specificSections[0].UtteranceAndEntitiesMap.map(u => u.utterance))
        }
    } else {
        intentSections.forEach(s => {
            if (s.SectionType === LUSectionTypes.SIMPLEINTENTSECTION) {
                intentUtterances = intentUtterances.concat(s.UtteranceAndEntitiesMap.map(u => u.utterance))
            } else {
                s.SimpleIntentSections.forEach(section => {
                    intentUtterances = intentUtterances.concat(section.UtteranceAndEntitiesMap.map(u => u.utterance))
                })
            }
        })
    }

    return intentUtterances
}

/**
 * do qna cross training with lu files
 * @param {Map<string, LUResource>} qnaFileIdToResourceMap map of qna file id and resource
 * @param {Map<string, LUResource>} luFileIdToResourceMap map of lu file id and resource
 * @param {string} interruptionIntentName interruption intent name
 * @param {boolean} allLuEmpty indicate if all lu files are section empty
 * @throws {exception} throws errors
 */
const qnaCrossTrain = function (qnaFileIdToResourceMap, luFileIdToResourceMap, interruptionIntentName, allLuEmpty) {
    try {
        for (const qnaObjectId of Array.from(qnaFileIdToResourceMap.keys())) {
            let luObjectId = qnaObjectId.toLowerCase().replace(new RegExp(helpers.FileExtTypeEnum.QnAFile + '$'), helpers.FileExtTypeEnum.LUFile)
            let fileName = path.basename(qnaObjectId, path.extname(qnaObjectId))
            const culture = fileHelper.getCultureFromPath(qnaObjectId)
            fileName = culture ? fileName.substring(0, fileName.length - culture.length - 1) : fileName

            luObjectId = Array.from(luFileIdToResourceMap.keys()).find(x => x.toLowerCase() === luObjectId)
            if (luObjectId) {
                const { luResource, qnaResource } = qnaCrossTrainCore(luFileIdToResourceMap.get(luObjectId), qnaFileIdToResourceMap.get(qnaObjectId), fileName, interruptionIntentName, allLuEmpty)
                luFileIdToResourceMap.set(luObjectId, luResource)
                qnaFileIdToResourceMap.set(qnaObjectId, qnaResource)
            } else {
                let qnaResource = qnaAddMetaData(qnaFileIdToResourceMap.get(qnaObjectId), fileName)
                qnaFileIdToResourceMap.set(qnaObjectId, qnaResource)
            }
        }
    } catch (err) {
        throw (err)
    }
}

/**
 * qna cross training core function
 * @param {LUResource} luResource the lu resource
 * @param {LUResource} qnaResource the qna resource
 * @param {string} fileName file name
 * @param {string} interruptionIntentName interruption intent name
 * @param {boolean} allLuEmpty indicate if all lu files are section empty
 * @returns {{luResource: LUResource, qnaResource: LUResource}} cross trained lu resource and qna resource
 */
const qnaCrossTrainCore = function (luResource, qnaResource, fileName, interruptionIntentName, allLuEmpty) {
    let trainedLuResource = luResource
    let trainedQnaResource = qnaResource

    // extract questions
    const qnaSections = qnaResource.Sections.filter(s => s.SectionType === LUSectionTypes.QNASECTION)
    let questions = []
    qnaSections.forEach(q => questions = questions.concat(q.Questions))

    // remove dups of questions themselves
    questions = Array.from(new Set(questions))

    // extract lu utterances of all intents
    let utterances = extractIntentUtterances(luResource)
    utterances = Array.from(new Set(utterances))

    // extract lu utterances of interruption intent
    let utterancesOfInterruption = extractIntentUtterances(luResource, interruptionIntentName)

    // extract lu utterances except interruption
    let utterancesOfLocalIntents = utterances.filter(u => !utterancesOfInterruption.includes(u)).map(u => u.toLowerCase())

    // remove questions which are duplicated with local lu utterances
    let dedupedQuestions = questions.filter(q => !utterancesOfLocalIntents.includes(q.toLowerCase()))

    // update interruption intent if there are duplications with questions
    let dedupedQuestionsOfLowerCase = dedupedQuestions.map(u => u.toLowerCase())
    if (utterancesOfInterruption.some(u => dedupedQuestionsOfLowerCase.includes(u.toLowerCase()))) {
        utterancesOfInterruption = utterancesOfInterruption.filter(u => !dedupedQuestionsOfLowerCase.includes(u.toLowerCase()))

        // get section id
        const sectionId = trainedLuResource.Sections.filter(s => s.Name === interruptionIntentName)[0].Id

        // construct updated interruption intent content
        utterancesOfInterruption = utterancesOfInterruption.map(u => '- '.concat(u))
        let updatedSectionContent = utterancesOfInterruption.join(NEWLINE)
        if (updatedSectionContent && updatedSectionContent !== '') {
            trainedLuResource = new SectionOperator(trainedLuResource).updateSection(sectionId, `# ${interruptionIntentName}${NEWLINE}${updatedSectionContent}`)
        }
    }

    // construct questions content
    dedupedQuestions = dedupedQuestions.map(q => '- '.concat(q)).filter(i => !patternWithPrebuiltEntity(i))
    let questionsContent = dedupedQuestions.join(NEWLINE)

    // cross training comments
    const crossTrainingComments = '> Source: cross training. Please do not edit these directly!'

    // add questions from qna file to corresponding lu file with intent named DeferToRecognizer_QnA_${fileName}
    if (!allLuEmpty && questionsContent && questionsContent !== '') {
        const questionsToUtterances = `${NEWLINE}${crossTrainingComments}${NEWLINE}# DeferToRecognizer_QnA_${fileName}${NEWLINE}${questionsContent}`
        trainedLuResource = new SectionOperator(trainedLuResource).addSection(questionsToUtterances)
    }

    // update qna filters
    trainedQnaResource = qnaAddMetaData(qnaResource, fileName)

    // remove utterances with curly brackets
    const utterancesWithoutPatterns = utterances.filter(i => /{([^}]+)}/g.exec(i) === null)

    // remove utterances which are duplicated with local qna questions
    let questionsOfLowerCase = questions.map(q => q.toLowerCase())
    let dedupedUtterances = utterancesWithoutPatterns.filter(u => !questionsOfLowerCase.includes(u.toLowerCase()))

    // add utterances from lu file to corresponding qna file with question set to all utterances
    // split large QA pairs into multiple smaller ones to stay under the limit of 300 questions per answer
    while (dedupedUtterances.length > 0) {
        let subDedupedUtterances = dedupedUtterances.splice(0, MAX_QUESTIONS_PER_ANSWER)
        // construct new question content for qna resource
        let utterancesContent = subDedupedUtterances.join(NEWLINE + '- ')
        let utterancesToQuestion = `${NEWLINE}${crossTrainingComments}${NEWLINE}> !# @qna.pair.source = crosstrained${NEWLINE}${NEWLINE}# ? ${utterancesContent}${NEWLINE}${NEWLINE}**Filters:**${NEWLINE}- dialogName=${fileName}${NEWLINE}${NEWLINE}\`\`\`${NEWLINE}intent=DeferToRecognizer_LUIS_${fileName}${NEWLINE}\`\`\``
        trainedQnaResource = new SectionOperator(trainedQnaResource).addSection(utterancesToQuestion)
    }

    return { luResource: trainedLuResource, qnaResource: trainedQnaResource }
}
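
// Illustrative output (hypothetical names): cross training 'weather.lu' with
// 'weather.qna' adds a deferral intent on the .lu side and a deferral QA pair
// on the .qna side, roughly:
//
//   # DeferToRecognizer_QnA_weather    (in weather.lu, one '- <question>' per qna question)
//
//   # ? <utterance 1>                  (in weather.qna, batched by MAX_QUESTIONS_PER_ANSWER)
//   **Filters:**
//   - dialogName=weather
//   ```
//   intent=DeferToRecognizer_LUIS_weather
//   ```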

const qnaAddMetaData = function (qnaResource, fileName) {
    let resultQnaResource = qnaResource
    // extract qna sections
    const qnaSections = qnaResource.Sections.filter(s => s.SectionType === LUSectionTypes.QNASECTION)

    // update qna filters
    let qnaSectionContents = []
    for (const qnaSection of qnaSections) {
        qnaSection.FilterPairs.push({ key: 'dialogName', value: fileName })
        let qnaSectionContent = ''
        if (qnaSection.source !== QNA_GENERIC_SOURCE) {
            qnaSectionContent += `> !# @qna.pair.source = ${qnaSection.source}${NEWLINE}${NEWLINE}`
        }

        if (qnaSection.QAPairId) {
            qnaSectionContent += `<a id = "${qnaSection.QAPairId}"></a>${NEWLINE}${NEWLINE}`
        }

        qnaSectionContent += `# ? ${Array.from(new Set(qnaSection.Questions)).join(NEWLINE + '- ')}${NEWLINE}${NEWLINE}**Filters:**${NEWLINE}- ${qnaSection.FilterPairs.map(f => f.key + '=' + f.value).join(NEWLINE + '- ')}${NEWLINE}${NEWLINE}\`\`\`${NEWLINE}${qnaSection.Answer}${NEWLINE}\`\`\``

        if (qnaSection.promptsText && qnaSection.promptsText.length > 0) {
            qnaSectionContent += `${NEWLINE}${NEWLINE}**Prompts:**${NEWLINE}- ${qnaSection.promptsText.join(NEWLINE + '- ')}`
        }

        qnaSectionContents.push(qnaSectionContent)
    }

    let qnaContents = qnaSectionContents.join(NEWLINE + NEWLINE)
    if (qnaContents && qnaContents !== '') {
        const modelInfoSections = qnaResource.Sections.filter(s => s.SectionType === LUSectionTypes.MODELINFOSECTION)
        const modelInfoContent = modelInfoSections.map(m => m.ModelInfo).join(NEWLINE)
        if (modelInfoContent && modelInfoContent !== '') qnaContents = NEWLINE + qnaContents

        resultQnaResource = new SectionOperator(new LUResource([], modelInfoContent, [])).addSection(qnaContents)
    }

    return resultQnaResource
}
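
// Sketch of the metadata added per QA pair (hypothetical file name): a
// 'dialogName=<fileName>' filter is appended so a QnA Maker query can be
// scoped to the dialog that owns the pair, e.g.
//
//   **Filters:**
//   - dialogName=weather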

/**
 * Parse and validate lu or qna object array to convert to LUResource object dict
 * @param {luObject[]} objectArray the lu or qna object list to be parsed
 * @param {boolean} verbose indicate to enable log messages or not
 * @param {any} importResolver import resolver when resolving import files
 * @returns {Map<string, LUResource>} map of file id and luResource
 * @throws {exception} throws errors
 */
const parseAndValidateContent = async function (objectArray, verbose, importResolver) {
    let fileIdToResourceMap = new Map()
    let allEmpty = true
    for (const object of objectArray) {
        let fileContent = object.content
        if (object.content && object.content !== '') {
            if (object.id.toLowerCase().endsWith(fileExtEnum.LUFile)) {
                let result = await LuisBuilderVerbose.build([object], verbose, undefined, importResolver)
                let luisObj = new Luis(result)
                fileContent = luisObj.parseToLuContent()
            } else {
                let result = await qnaBuilderVerbose.build([object], verbose, importResolver)
                fileContent = result.parseToQnAContent()
            }
        }

        let resource = luParser.parse(fileContent)

        if (resource.Sections.filter(s => s.SectionType !== LUSectionTypes.MODELINFOSECTION).length > 0) allEmpty = false

        if (resource.Errors && resource.Errors.length > 0) {
            if (verbose) {
                var warns = resource.Errors.filter(error => (error && error.Severity && error.Severity === DiagnosticSeverity.WARN))
                if (warns.length > 0) {
                    process.stdout.write(warns.map(warn => warn.toString()).join(NEWLINE).concat(NEWLINE))
                }
            }

            var errors = resource.Errors.filter(error => (error && error.Severity && error.Severity === DiagnosticSeverity.ERROR))
            if (errors.length > 0) {
                throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, `Invalid file ${object.id}: ${errors.map(error => error.toString()).join(NEWLINE)}`))
            }
        }

        fileIdToResourceMap.set(object.id, resource)
    }

    return {fileIdToResourceMap, allEmpty}
}

const pretreatment = function (luContents, qnaContents) {
    // Parse lu and qna objects
    let luObjectArray = fileHelper.getParsedObjects(luContents)
    let qnaObjectArray = fileHelper.getParsedObjects(qnaContents)

    return {luObjectArray, qnaObjectArray}
}

const patternWithPrebuiltEntity = function (utterance) {
    let patternAnyEntity
    let matchedEntity = /{([^}]+)}/g.exec(utterance)

    if (matchedEntity !== null) {
        patternAnyEntity = matchedEntity[1].trim()

        if (patternAnyEntity && patternAnyEntity.startsWith('@')) {
            patternAnyEntity = patternAnyEntity.slice(1).trim()
        }

        let patternAnyEntityWithRole = patternAnyEntity.split(':')
        if (patternAnyEntityWithRole.length > 1) {
            patternAnyEntity = patternAnyEntityWithRole[0].trim()
        }

        if (prebuiltEntityTypes.includes(patternAnyEntity)) {
            return true
        }
    }

    return false
}
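
// Examples (assuming 'datetimeV2' is listed in prebuiltEntityTypes):
//   patternWithPrebuiltEntity('book a flight {datetimeV2}')        // -> true
//   patternWithPrebuiltEntity('book a flight {@datetimeV2:when}')  // -> true ('@' and role stripped)
//   patternWithPrebuiltEntity('book a flight {toCity}')            // -> false (not prebuilt)
//   patternWithPrebuiltEntity('book a flight')                     // -> false (no pattern)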

@ -0,0 +1,47 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
const modules = {
    parser: {
        parseFile: require('./lufile/parseFileContents').parseFile,
        validateLUISBlob: require('./luis/luisValidator')
    },
    refresh: {
        constructMdFromLUIS: require('./luis/luConverter'),
        constructMdFromQnA: require('./qna/qnamaker/qnaConverter'),
        constructMdFromQnAAlteration: require('./qna/alterations/qnaConverter')
    },
    translate: {
        parseAndTranslate: require('./lufile/translate-helpers').parseAndTranslate,
        translateText: require('./lufile/translate-helpers').translateText
    },
    helperEnums: {
        errorCodes: require('./utils/enums/CLI-errors').errorCode,
        parseCommands: require('./utils/enums/parsecommands'),
    },
    helperClasses: {
        Exception: require('./utils/exception'),
        LUIS: require('./luis/luis'),
        QnA: require('./qna/qnamaker/qnamaker'),

        Parser: require('./lufile/classes/parserObject')
    },
    V2: {
        Luis: require('./luis/luis'),
        LU: require('./lu/lu'),
        LuisBuilder: require('./luis/luisBuilder'),
        QNA: require('./lu/qna'),
        QnAMaker: require('./qna/qnamaker/qnamaker'),
        KB: require('./qna/qnamaker/kb'),
        Alterations: require('./qna/alterations/alterations'),
        QnAMakerBuilder: require('./qna/qnamaker/qnaMakerBuilder'),
        Exception: require('./utils/exception'),
        LUOptions: require('./lu/luOptions'),
        Utils: {
            ReadTextFile: require('./lufile/read-text-file').readSync
        }
    }
};

module.exports = modules;
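
// Consumption sketch (paths relative to this index file; no published package
// name is assumed here, and fromLUAsync is the assumed V2 builder entry point):
//
//   const lu = require('./index')
//   const greeting = new lu.V2.LU('# Greeting\n- hello', new lu.V2.LUOptions('greeting'))
//   const app = await lu.V2.LuisBuilder.fromLUAsync([greeting])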

@ -0,0 +1,29 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const translateHelpers = require('./../lufile/translate-helpers')
const luOptions = require('./luOptions')

class Lu {
    constructor(content, options = new luOptions) {
        this.content = content
        this.id = options.id ? options.id : ''
        this.includeInCollate = options.includeInCollate !== undefined ? options.includeInCollate : true
        this.language = options.language ? options.language : ''
        this.path = options.path ? options.path : ''

        if (this.language !== '') {
            this.name = this.id + '.' + this.language + '.lu'
        } else {
            this.name = this.id + '.lu'
        }
    }

    async translate(translate_key, tgt_lang, translate_comments = false, translate_link_text = false) {
        this.content = await translateHelpers.parseAndTranslate(this.content, translate_key, tgt_lang, '', translate_comments, translate_link_text)
    }
}

module.exports = Lu
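
// Usage sketch (hypothetical content and ids):
//
//   const Lu = require('./lu')
//   const LuOptions = require('./luOptions')
//   const greeting = new Lu('# Greeting\n- hi\n- hello', new LuOptions('greeting', true, 'en-us'))
//   console.log(greeting.name)  // 'greeting.en-us.lu'
//   // await greeting.translate('<translator-key>', 'fr')  // rewrites this.content in French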

@ -0,0 +1,566 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const fs = require('fs');
const path = require('path');
const parseFileContents = require('./../lufile/parseFileContents');
const retCode = require('./../utils/enums/CLI-errors');
const helpers = require('./../utils/helpers');
const hClasses = require('./../lufile/classes/hclasses');
const exception = require('./../utils/exception');
const luObject = require('./lu');
const luOptions = require('./luOptions')
const parserObject = require('./../lufile/classes/parserObject');
const txtfile = require('./../lufile/read-text-file');
const BuildDiagnostic = require('./../lufile/diagnostic').BuildDiagnostic;
const LUISObjNameEnum = require('./../utils/enums/luisobjenum');
const fetch = require('node-fetch');

module.exports = {
    /**
     * Merges Lu/QnA files into a parserObject.
     * @param {Array<Lu>} luObjArray Array of LU/QnA files to be merged
     * @param {boolean} verbose indicates if we need verbose logging.
     * @param {string} luis_culture LUIS locale code
     * @param {function} luSearchFn function to retrieve the lu files found in the references
     * @returns {parserObject} Object that contains list of parsed LUIS object, list of parsed QnA object and list of parsed QnA Alteration Content
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    Build: async function (luObjArray, verbose, luis_culture, luSearchFn) {
        let allParsedContent = await buildLuJsonObject(luObjArray, verbose, luis_culture, luSearchFn)
        let refTree = await buildRefTree(allParsedContent, luSearchFn)
        resolveTreeRefs(refTree, luObjArray);
        return allParsedContent
    }
}

const buildRefTree = async function (allParsedContent, luSearchFn) {
    let refs = {};
    await Promise.all(allParsedContent.LUISContent.map(async (parserObj, objIdx) => {
        let luObj = {
            obj: parserObj.LUISJsonStructure,
            srcFile: parserObj.srcFile,
            refs: []
        };
        if (refs[parserObj.srcFile] === undefined) {
            refs[parserObj.srcFile] = {
                'luis': luObj
            };
        } else {
            if (refs[parserObj.srcFile].luis === undefined) {
                refs[parserObj.srcFile].luis = luObj;
            }
        }
        parserObj.LUISJsonStructure.uttHash = {};
        (parserObj.LUISJsonStructure.utterances || []).forEach(async (utterance, uttIdx) => {
            parserObj.LUISJsonStructure.uttHash[utterance.text] = '';
            if (helpers.isUtteranceLinkRef(utterance.text)) {
                let parsedLinkUri = await helpers.parseLinkURI(utterance.text, parserObj.srcFile, luSearchFn);
                refs[parserObj.srcFile].luis.refs.push({
                    refId: parsedLinkUri.fileName,
                    uttId: uttIdx,
                    parsedLink: parsedLinkUri,
                    uttObj: utterance,
                    text: utterance.text,
                    type: 'luis'
                })
            }
        })
    }))

    await Promise.all(allParsedContent.QnAContent.map(async (parserObj, objIdx) => {
        let qnaObj = {
            obj: parserObj.qnaJsonStructure,
            alt: allParsedContent.QnAAlterations[objIdx].qnaAlterations,
            srcFile: parserObj.srcFile,
            refs: []
        }
        if (refs[parserObj.srcFile] === undefined) {
            refs[parserObj.srcFile] = {'qna': qnaObj};
        } else {
            if (refs[parserObj.srcFile].qna === undefined) {
                refs[parserObj.srcFile].qna = qnaObj;
            }
        }
        (parserObj.qnaJsonStructure.qnaList.forEach(async qnaPair => {
            qnaPair.questions.forEach(async (question, qIdx) => {
                if (helpers.isUtteranceLinkRef(question)) {
                    let parsedLinkUri = await helpers.parseLinkURI(question)
                    refs[parserObj.srcFile].qna.refs.push({
                        refId: parsedLinkUri.fileName,
                        qId: qIdx,
                        text: question,
                        qObj: qnaPair,
                        parsedLink: parsedLinkUri,
                        type: 'qna'
                    })
                }
            })
        }))
    }));

    return refs;
}

const resolveTreeRefs = function (refTree, luObjArray) {
    (luObjArray || []).forEach(luObj => {
        resolveRefs(refTree, luObj.id)
    })
}

const resolveRefs = function (refTree, srcId) {
    if (refTree[srcId] !== undefined && refTree[srcId].luis) {
        // sort by refs
        refTree[srcId].luis.refs.sort((a, b) => a.uttId - b.uttId)

        refTree[srcId].luis.refs.forEach((ref, rIdx) => {

            if (ref.IdsVisited === undefined) {
                ref.IdsVisited = [srcId];
            } else {
                if (ref.IdsVisited.includes(srcId)) {
                    // throw
                    let error = BuildDiagnostic({
                        message: `Loop detected for reference '${ref.text}' as ${ref.IdsVisited} -> ${srcId}`
                    });

                    throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
                } else {
                    ref.IdsVisited.push(srcId);
                }
            }

            let result = resolveRefByType(srcId, ref, refTree)

            // process utterances and patterns.
            let luObj = refTree[srcId].luis.obj;

            // remove the reference utterance
            luObj.utterances.splice((ref.uttId - rIdx), 1);

            // add new utterances
            if (result.utterances !== undefined) {
                result.utterances.forEach(utt => {
                    if (luObj.uttHash[utt] === undefined) {
                        luObj.utterances.push(new hClasses.uttereances(utt, ref.uttObj.intent));
                        luObj.uttHash[utt] = '';
                    }
                })
            }

            if (result.patterns !== undefined) {
                // add new patterns
                result.patterns.forEach(patt => {
                    luObj.patterns.push(new hClasses.pattern(patt, ref.uttObj.intent));
                    if (!patt.includes('{')) return
                    handlePatternAnyEntity(patt, luObj);
                })
            }
        })

        delete refTree[srcId].luis.obj.uttHash;
    }

    if (refTree[srcId] !== undefined && refTree[srcId].qna) {
        // Handle qna refs
        (refTree[srcId].qna.refs || []).forEach((ref, rIdx) => {
            let result = resolveRefByType(srcId, ref, refTree)

            if (result.patterns && result.patterns.length !== 0) {
                // throw
                let error = BuildDiagnostic({
                    message: `Unable to parse ${ref.text} in file: ${srcId}. References cannot pull in patterns. Consider '*utterances*' suffix if you are looking to pull in only utterances`
                });

                throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
            }

            // process utterances
            let qObj = ref.qObj;
            // remove the reference utterance
            qObj.questions.splice((ref.qId - rIdx), 1);
            // add new utterances
            result.utterances.forEach(utt => qObj.questions.push(utt));
        })
    }

}

const handlePatternAnyEntity = function (patt, luObj) {
    let entityRegex = new RegExp(/\{(.*?)\}/g);
    let entitiesFound = patt.match(entityRegex);

    entitiesFound.forEach(function (entity) {
        entity = entity.replace("{", "").replace("}", "");
        let entityName = entity;
        let roleName = '';
        if (entity.includes(':')) {
            // this is an entity with role
            [entityName, roleName] = entity.split(':');
        }
        // insert the entity only if it does not already exist
        let paIdx = -1;
        let patternAnyInMaster = luObj.patternAnyEntities.find((item, idx) => {
            if (item.name === entityName) {
                paIdx = idx;
                return true;
            }
            return false;
        });
        // insert the entity only if it does not already exist
        if (isNewEntity(luObj, entityName)) {
            if (!patternAnyInMaster && roleName !== '') {
                parseFileContents.addItemOrRoleIfNotPresent(luObj, LUISObjNameEnum.PATTERNANYENTITY, entityName, [roleName])
                return
            }

            if (!patternAnyInMaster) {
                parseFileContents.addItemIfNotPresent(luObj, LUISObjNameEnum.PATTERNANYENTITY, entity);
                return
            }
            // add the role if it does not exist already.
            if (roleName !== '') {
                if (!patternAnyInMaster.roles.includes(roleName)) patternAnyInMaster.roles.push(roleName);
            }
            return
        }
        // we found this pattern.any entity as another type.
        if (patternAnyInMaster && paIdx !== -1) {
            // remove the patternAny entity from the list because it has been explicitly defined elsewhere.
            luObj.patternAnyEntities.splice(paIdx, 1);
        }
    })
}

const resolveQuestionRef = function (srcId, ref, refTree) {
    let utterances = [];
    let patterns = [];
    let srcFile = refTree[srcId][ref.type].srcFile;
    let newId = path.resolve(path.dirname(srcFile ? srcFile : ''), ref.parsedLink.fileName);
    let tgtId = (refTree[ref.parsedLink.fileName] && refTree[ref.parsedLink.fileName].qna !== undefined) ? ref.parsedLink.fileName : undefined;
    tgtId = (tgtId === undefined && refTree[newId] !== undefined && refTree[newId].qna !== undefined) ? newId : tgtId;
    let tgtObj = refTree[ref.parsedLink.fileName] || refTree[newId] || undefined;
    if (!tgtObj && !ref.parsedLink.fileName.endsWith('*')) {
        let error = BuildDiagnostic({
            message: `Unable to parse ${ref.text} in file: ${srcFile}. Cannot find reference.`
        });
        throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
    }
    // Resolve additional references if any in tgt obj
    if (tgtObj && ((tgtObj.luis && tgtObj.luis.refs.length !== 0) || (tgtObj.qna && tgtObj.qna.refs.length !== 0)))
        resolveRefs(refTree, tgtId);
    let parseLCasePath = ref.parsedLink.path.toLowerCase();
    let qnaObj = tgtObj && tgtObj.qna && tgtObj.qna.obj ? tgtObj.qna.obj : undefined;
    let qnaAlt = tgtObj && tgtObj.qna && tgtObj.qna.alt ? tgtObj.qna.alt : undefined;
    let parsedQnABlobs = qnaObj !== undefined ? [qnaObj] : [];
    let parsedQnAAlterations = qnaAlt !== undefined ? [qnaAlt] : [];
    if (ref.parsedLink.fileName.endsWith('*')) {
        // this notation is only valid with file path. So try as file path.
        let tPath = ref.parsedLink.fileName.replace(/\*/g, '');
        for (let prop in refTree) {
            if (prop.startsWith(path.resolve(path.dirname(srcFile), tPath))) {
                parsedQnABlobs.push(refTree[prop].qna.obj);
                parsedQnAAlterations.push(refTree[prop].qna.alt)
            }
        }
    }
    if (parseLCasePath.startsWith('*answers*')) {
        parsedQnABlobs.forEach(blob => blob.qnaList.forEach(item => utterances.push(item.answer)));
    } else if (ref.parsedLink.path.length > 1 && parseLCasePath.startsWith('?') && parseLCasePath.endsWith('?')) {
        let itemsFound = undefined;
        let testQuestion = ref.parsedLink.path.replace(/\?/g, '').replace(/-/g, ' ').trim();
        // find the specific question
        parsedQnABlobs.forEach(blob => {
            if (itemsFound) return;
            itemsFound = blob.qnaList.find(item => item.questions.includes(testQuestion));
        })
        if (itemsFound) {
            itemsFound.questions.forEach(question => utterances.push(question));
        }
    } else if (parseLCasePath.startsWith('*alterations*')) {
        parsedQnAAlterations.forEach(blob => blob.wordAlterations.forEach(item => item.alterations.forEach(alter => utterances.push(alter))));
    } else if (parseLCasePath.startsWith('$') && parseLCasePath.endsWith('?')) {
        // specific alteration to find
        let alterToFind = ref.parsedLink.path.replace(/[$\?]/g, '').trim();
        parsedQnAAlterations.forEach(blob => blob.wordAlterations.forEach(item => {
            if (item.alterations.includes(alterToFind)) {
                item.alterations.forEach(alter => utterances.push(alter));
            }
        }));
    } else {
        parsedQnABlobs.forEach(blob => blob.qnaList.forEach(item => item.questions.forEach(question => utterances.push(question))));
    }
    return {utterances, patterns}
}
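
// QnA deep-reference forms handled above (illustrative, with hypothetical files):
//   [all](./faq.qna)                        -> every question in faq.qna
//   [one](./faq.qna#?how-do-i-reset?)       -> questions of the QA pair containing 'how do i reset'
//   [answers](./faq.qna#*answers*)          -> answers instead of questions
//   [alts](./faq.qna#*alterations*)         -> all word alterations
//   [folder](./qna/*)                       -> every .qna file under ./qna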

const resolveUttAndPattRef = function (srcId, ref, refTree) {
    let utterances = [];
    let patterns = [];
    let srcFile = refTree[srcId][ref.type].srcFile;
    let newId = path.resolve(path.dirname(srcFile ? srcFile : ''), ref.parsedLink.fileName)
    let tgtId = (refTree[ref.parsedLink.fileName] && refTree[ref.parsedLink.fileName].luis !== undefined) ? ref.parsedLink.fileName : undefined;
    tgtId = (tgtId === undefined && refTree[newId] !== undefined && refTree[newId].luis !== undefined) ? newId : tgtId;
    let tgtObj = refTree[ref.parsedLink.fileName] || refTree[newId] || undefined;
    if (!tgtObj) {
        let error = BuildDiagnostic({
            message: `Unable to parse ${ref.text} in file: ${srcFile}. Cannot find reference.`
        });

        throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
    }
    // Resolve additional references if any in tgt obj
    if ((tgtObj.luis && tgtObj.luis.refs.length !== 0) || (tgtObj.qna && tgtObj.qna.refs.length !== 0)) resolveRefs(refTree, tgtId)

    let parseLCasePath = ref.parsedLink.path.toLowerCase();
    let luisObj = tgtObj.luis.obj;
    let referenceIntent = undefined;
    if (parseLCasePath.endsWith('*utterancesandpatterns*')) {
        // get utterance and pattern list from reference intent and update list
        referenceIntent = parseLCasePath.replace(/-/g, ' ').replace('*utterancesandpatterns*', '').trim();
    } else if (parseLCasePath.endsWith('*utterances*')) {
        // get utterance list from reference intent and update list
        referenceIntent = parseLCasePath.replace(/-/g, ' ').replace('*utterances*', '').trim();
        patterns = undefined;
    } else if (parseLCasePath.endsWith('*patterns*')) {
        // get pattern list from reference intent and update list
        referenceIntent = parseLCasePath.replace(/-/g, ' ').replace('*patterns*', '').trim();
        utterances = undefined;
    } else if (parseLCasePath.endsWith('*')) {
        referenceIntent = undefined;
    } else {
        // get utterance list from reference intent and update list
        referenceIntent = parseLCasePath.replace(/-/g, ' ').trim();
    }
    if (utterances !== undefined) {
        luisObj.utterances.forEach(item => {
            if (referenceIntent !== undefined && referenceIntent !== "") {
                if (item.intent.toLowerCase() === referenceIntent.toLowerCase()) {
                    utterances.push(item.text)
                }
            } else {
                utterances.push(item.text)
            }
        })
    }
    if (patterns !== undefined) {
        luisObj.patterns.forEach(patt => {
            if (referenceIntent !== undefined && referenceIntent !== "") {
                if (patt.intent.toLowerCase() === referenceIntent.toLowerCase()) {
                    patterns.push(patt.pattern)
                }
            } else {
                patterns.push(patt.pattern)
            }
        })
    }
    return {utterances, patterns};
}
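
// LUIS-side reference suffixes recognized above (illustrative):
//   [greet](./common.lu#Greeting)                         -> utterances of intent 'Greeting'
//   [greet](./common.lu#Greeting*utterances*)             -> utterances only (patterns excluded)
//   [greet](./common.lu#Greeting*patterns*)               -> patterns only
//   [greet](./common.lu#Greeting*utterancesandpatterns*)  -> both
//   [all](./common.lu#*)                                  -> everything in the file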

const resolveRefByType = function (srcId, ref, refTree) {
    let filter = ref.parsedLink.path.endsWith('?') ? resolveQuestionRef : resolveUttAndPattRef
    return filter(srcId, ref, refTree);
}

const buildLuJsonObject = async function (luObjArray, log, luis_culture, luSearchFn = resolveLuContent) {
    let allParsedLUISContent = []
    let allParsedQnAContent = []
    let allParsedAlterationsContent = []
    let filesToParse = Array.from(luObjArray)
    let parsedFiles = []
    while (filesToParse.length > 0) {
        let luOb = filesToParse[0]
        // skip this file if we have parsed it already
        if (parsedFiles.includes(luOb.id)) {
            filesToParse.splice(0, 1)
            continue
        }

        let parsedContent = await parseLuFile(luOb, log, luis_culture)
        parsedFiles.push(luOb.id)

        // Fix for BF-CLI #620
        // We do not perform validation here. For the parseFile V1 API route,
        // the recommendation is to call validate() after parse.
        if (haveLUISContent(parsedContent.LUISJsonStructure)) {
            allParsedLUISContent.push(parserObject.create(parsedContent.LUISJsonStructure, undefined, undefined, luOb.id, luOb.includeInCollate))
        }

        allParsedQnAContent.push(parserObject.create(undefined, parsedContent.qnaJsonStructure, undefined, luOb.id, luOb.includeInCollate))
        allParsedAlterationsContent.push(parserObject.create(undefined, undefined, parsedContent.qnaAlterations, luOb.id, luOb.includeInCollate))
        // remove this file from the list
        filesToParse.splice(0, 1)

        // add additional files to parse to the list
        if (parsedContent.additionalFilesToParse.length <= 0) {
            continue
        }

        let foundLuFiles = await luSearchFn(luOb.id, parsedContent.additionalFilesToParse)
        for (let i = 0; i < foundLuFiles.length; i++) {
            if (parsedFiles.includes(foundLuFiles[i].id)) {
                let duplicated = foundLuFiles.splice(i--, 1)
                updateParsedFiles(allParsedLUISContent, allParsedQnAContent, allParsedAlterationsContent, duplicated)
            }
        }

        filesToParse = filesToParse.concat(foundLuFiles)
    }
    return {
        LUISContent: allParsedLUISContent,
        QnAContent: allParsedQnAContent,
        QnAAlterations: allParsedAlterationsContent
    }
}
const resolveLuContent = async function (srcId, idsToFind) {
    let luObjects = [];
    for (let idx = 0; idx < idsToFind.length; idx++) {
        let toResolve = idsToFind[idx];
        if (isUrl(toResolve.filePath)) {
            await resolveLuUriContent(srcId, toResolve, luObjects);
        } else {
            resolveLuFileContent(toResolve, luObjects, srcId);
        }
    }
    return luObjects;
}
const resolveLuFileContent = function (file, luObjects, srcId) {
    let parentFilePath = srcId === 'stdin' ? process.cwd() : path.parse(path.resolve(srcId)).dir
    // Support wild cards at the end of a relative .LU file path.
    // './bar/*' should look for all .lu files under the specified folder.
    // './bar/**' should recursively look for .lu files under sub-folders as well.
    if (file.filePath.endsWith('*')) {
        const isRecursive = file.filePath.endsWith('**')
        const rootFolder = file.filePath.replace(/\*/g, '')
        let rootPath = rootFolder;
        if (!path.isAbsolute(rootFolder)) {
            rootPath = path.resolve(parentFilePath, rootFolder);
        }
        // Get LU files in this location
        const luFilesToAdd = helpers.findLUFiles(rootPath, isRecursive);
        // add these to filesToParse
        for (let f = 0; f < luFilesToAdd.length; f++) {
            const opts = new luOptions(luFilesToAdd[f], file.includeInCollate)
            luObjects.push(new luObject(readLuFile(luFilesToAdd[f]), opts))
        }
        return
    }

    if (!path.isAbsolute(file.filePath)) {
        file.filePath = path.resolve(parentFilePath, file.filePath)
    }
    // find matching parsed files and ensure includeInCollate is updated if needed.
    luObjects.push(new luObject(readLuFile(file.filePath), new luOptions(file.filePath, file.includeInCollate)))
}
const resolveLuUriContent = async function (srcId, toResolve, luObjects) {
    let uri = toResolve.filePath || undefined;
    if (uri !== undefined) {
        let response;
        try {
            response = await fetch(uri, { method: 'GET' });
        } catch (err) {
            // throw, invalid URI
            let errorMsg = `URI: "${uri}" appears to be invalid. Please double check the URI or re-try this parse when you are connected to the internet.`;
            let error = BuildDiagnostic({
                message: errorMsg
            })

            throw (new exception(retCode.errorCode.INVALID_URI, error.toString(), [error]));
        }
        var res = await response.buffer();
        var encodedRes = helpers.fixBuffer(res);
        luObjects.push(new luObject(encodedRes, new luOptions(toResolve.filePath, toResolve.includeInCollate)));
    }
}
const isUrl = function (path) {
    try {
        new URL(path);
        return true;
    } catch (err) {
        return false;
    }
}

const updateParsedFiles = function (allParsedLUISContent, allParsedQnAContent, allParsedAlterationsContent, luobject) {
    // find the instance and ensure includeInCollate property is set correctly
    if (luobject.includeInCollate) {
        let matchInLUIS = allParsedLUISContent.find(item => item.srcFile == luobject.id);
        if (matchInLUIS) matchInLUIS.includeInCollate = true;

        let matchInQnA = allParsedQnAContent.find(item => item.srcFile == luobject.id);
        if (matchInQnA) matchInQnA.includeInCollate = true;

        let matchInAlterations = allParsedAlterationsContent.find(item => item.srcFile == luobject.id);
        if (matchInAlterations) matchInAlterations.includeInCollate = true;
    }
}

const haveLUISContent = function (blob) {
    if (!blob) return false;
    return ((blob[LUISObjNameEnum.INTENT].length > 0) ||
        (blob[LUISObjNameEnum.ENTITIES].length > 0) ||
        (blob[LUISObjNameEnum.CLOSEDLISTS].length > 0) ||
        (blob[LUISObjNameEnum.PATTERNANYENTITY].length > 0) ||
        (blob.patterns.length > 0) ||
        (blob[LUISObjNameEnum.UTTERANCE].length > 0) ||
        (blob.prebuiltEntities.length > 0) ||
        (blob[LUISObjNameEnum.REGEX].length > 0) ||
        (blob.model_features && blob.model_features.length > 0) ||
        (blob.phraselists && blob.phraselists.length > 0) ||
        (blob.composites.length > 0));
}

const isNewEntity = function (luisModel, entityName) {
    let simpleEntityInMaster = luisModel.entities.find(item => item.name == entityName);
    let compositeInMaster = luisModel.composites.find(item => item.name == entityName);
    let listEntityInMaster = luisModel.closedLists.find(item => item.name == entityName);
    let regexEntityInMaster = luisModel.regex_entities.find(item => item.name == entityName);
    let prebuiltInMaster = luisModel.prebuiltEntities.find(item => item.name == entityName);

    return !simpleEntityInMaster &&
        !compositeInMaster &&
        !listEntityInMaster &&
        !regexEntityInMaster &&
        !prebuiltInMaster
}

const readLuFile = function (file) {
    if (!fs.existsSync(path.resolve(file))) {
        let error = BuildDiagnostic({
            message: `Sorry, unable to open [${file}]`
        });
        throw (new exception(retCode.errorCode.FILE_OPEN_ERROR, error.toString()));
    }

    let fileContent = txtfile.readSync(file);
    if (!fileContent) {
        let error = BuildDiagnostic({
            message: `Sorry, error reading file: ${file}`
        });
        throw (new exception(retCode.errorCode.FILE_OPEN_ERROR, error.toString()));
    }
    return fileContent
}

const parseLuFile = async function (luOb, log, luis_culture) {
    let parsedContent = ''
    if (!luOb.content) {
        let error = BuildDiagnostic({ message: `Cannot parse empty ${luOb.id}. Please add content to the file or remove it.` })
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, error.toString()));
    }
    try {
        parsedContent = await parseFileContents.parseFile(luOb.content, log, luis_culture);
    } catch (err) {
        err.source = luOb.id;
        throw (err);
    }
    if (!parsedContent) {
        let error = BuildDiagnostic({
            message: `Sorry, file ${luOb.id} had invalid content`
        });
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, error.toString()));
    }
    return parsedContent
}

@ -0,0 +1,21 @@
class LuOptions {
    constructor(id = '', includeInCollate = true, language = '', path = '') {
        this.id = id ? id : get_guid()
        this.includeInCollate = includeInCollate
        this.language = language
        this.path = path
    }
}

module.exports = LuOptions

/**
 * Helper function to create a random guid
 * @returns {string} GUID
 */
const get_guid = function () {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
        var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
}

@ -0,0 +1,29 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const translateHelpers = require('./../lufile/translate-helpers')
const qnaOptions = require('./qnaOptions')

class Qna {
    constructor(content, options = new qnaOptions()) {
        this.content = content
        this.id = options.id ? options.id : ''
        this.includeInCollate = options.includeInCollate !== undefined ? options.includeInCollate : true
        this.language = options.language ? options.language : ''
        this.path = options.path ? options.path : ''

        if (this.language !== '') {
            this.name = this.id + '.' + this.language + '.qna'
        } else {
            this.name = this.id + '.qna'
        }
    }

    async translate(translate_key, tgt_lang, translate_comments = false, translate_link_text = false) {
        this.content = await translateHelpers.parseAndTranslate(this.content, translate_key, tgt_lang, '', translate_comments, translate_link_text, false)
    }
}

module.exports = Qna

@ -0,0 +1,21 @@
class QnAOptions {
    constructor(id = '', includeInCollate = true, language = '', path = '') {
        this.id = id ? id : get_guid()
        this.includeInCollate = includeInCollate
        this.language = language
        this.path = path
    }
}

module.exports = QnAOptions

/**
 * Helper function to create a random guid
 * @returns {string} GUID
 */
const get_guid = function () {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
        var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
}

@ -0,0 +1,503 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

import {LuBuildCore} from './core'
import {Settings} from './settings'
import {MultiLanguageRecognizer} from './multi-language-recognizer'
import {Recognizer} from './recognizer'
import {CrossTrainedRecognizer} from './cross-trained-recognizer'
const path = require('path')
const fs = require('fs-extra')
const delay = require('delay')
const fileHelper = require('./../../utils/filehelper')
const fileExtEnum = require('./../utils/helpers').FileExtTypeEnum
const retCode = require('./../utils/enums/CLI-errors')
const exception = require('./../utils/exception')
const LuisBuilderVerbose = require('./../luis/luisCollate')
const LuisBuilder = require('./../luis/luisBuilder')
const Luis = require('./../luis/luis')
const LUOptions = require('./../lu/luOptions')
const Content = require('./../lu/lu')
const recognizerType = require('./../utils/enums/recognizertypes')

export class Builder {
  private readonly handler: (input: string) => any

  constructor(handler: any) {
    this.handler = handler
  }

  async loadContents(
    files: string[],
    culture: string,
    suffix: string,
    region: string,
    schema?: string,
    importResolver?: object) {
    let multiRecognizers = new Map<string, MultiLanguageRecognizer>()
    let settings: any
    let recognizers = new Map<string, Recognizer>()
    let luContents: Array<any> = []
    let crosstrainedRecognizers = new Map<string, CrossTrainedRecognizer>()

    for (const file of files) {
      let fileCulture: string
      let fileName: string

      let cultureFromPath = fileHelper.getCultureFromPath(file)
      if (cultureFromPath) {
        fileCulture = cultureFromPath
        let fileNameWithCulture = path.basename(file, path.extname(file))
        fileName = fileNameWithCulture.substring(0, fileNameWithCulture.length - fileCulture.length - 1)
      } else {
        fileCulture = culture
        fileName = path.basename(file, path.extname(file))
      }

      const fileFolder = path.dirname(file)
      const crossTrainedFileName = fileName + '.lu.qna.dialog'
      const crossTrainedRecognizerPath = path.join(fileFolder, crossTrainedFileName)
      if (!crosstrainedRecognizers.has(fileName)) {
        let crosstrainedRecognizerContent = []
        let crosstrainedRecognizerSchema = schema
        if (fs.existsSync(crossTrainedRecognizerPath)) {
          let crosstrainedRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(crossTrainedRecognizerPath))
          crosstrainedRecognizerContent = crosstrainedRecognizerObject.recognizers
          crosstrainedRecognizerSchema = crosstrainedRecognizerSchema || crosstrainedRecognizerObject.$schema
          this.handler(`${crossTrainedRecognizerPath} loaded\n`)
        }

        crosstrainedRecognizers.set(fileName, new CrossTrainedRecognizer(crossTrainedRecognizerPath, crosstrainedRecognizerContent, crosstrainedRecognizerSchema as string))
      }

      let fileContent = ''
      let result
      let luisObj
      let luFiles = await fileHelper.getLuObjects(undefined, file, true, fileExtEnum.LUFile)
      this.handler(`${file} loaded\n`)

      // filter empty lu files
      luFiles = luFiles.filter((file: any) => file.content !== '')
      if (luFiles.length <= 0) continue

      try {
        result = await LuisBuilderVerbose.build(luFiles, true, fileCulture, importResolver)
        luisObj = new Luis(result)
        fileContent = luisObj.parseToLuContent()
      } catch (err) {
        if (err.source) {
          err.text = `Invalid LU file ${err.source}: ${err.text}`
        } else {
          err.text = `Invalid LU file ${file}: ${err.text}`
        }
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, err.text))
      }

      const multiRecognizerPath = path.join(fileFolder, `${fileName}.lu.dialog`)
      if (!multiRecognizers.has(fileName)) {
        let multiRecognizerContent = {}
        let multiRecognizerSchema = schema
        if (fs.existsSync(multiRecognizerPath)) {
          let multiRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(multiRecognizerPath))
          multiRecognizerContent = multiRecognizerObject.recognizers
          multiRecognizerSchema = multiRecognizerSchema || multiRecognizerObject.$schema
          this.handler(`${multiRecognizerPath} loaded\n`)
        }

        multiRecognizers.set(fileName, new MultiLanguageRecognizer(multiRecognizerPath, multiRecognizerContent, multiRecognizerSchema as string))
      }

      if (settings === undefined) {
        const settingsPath = path.join(fileFolder, `luis.settings.${suffix}.${region}.json`)
        let settingsContent = {}
        if (fs.existsSync(settingsPath)) {
          settingsContent = JSON.parse(await fileHelper.getContentFromFile(settingsPath)).luis
          this.handler(`${settingsPath} loaded\n`)
        }

        settings = new Settings(settingsPath, settingsContent)
      }

      const content = new Content(fileContent, new LUOptions(fileName, true, fileCulture, file))
      luContents.push(content)

      const dialogFile = path.join(fileFolder, `${content.name}.dialog`)
      let existingDialogObj: any
      if (fs.existsSync(dialogFile)) {
        existingDialogObj = JSON.parse(await fileHelper.getContentFromFile(dialogFile))
        this.handler(`${dialogFile} loaded\n`)
      }

      if (existingDialogObj && schema) {
        existingDialogObj.$schema = schema
      }

      let recognizer = Recognizer.load(content.path, content.name, dialogFile, settings, existingDialogObj, schema)
      recognizers.set(content.name, recognizer)
    }

    // validate if there are duplicated files with same name and locale
    let setOfContents = new Set()
    const hasDuplicates = luContents.some(function (currentObj) {
      return setOfContents.size === setOfContents.add(currentObj.name).size
    })

    if (hasDuplicates) {
      throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, 'Files with same name and locale are found.'))
    }

    return {luContents, recognizers, multiRecognizers, settings, crosstrainedRecognizers}
}
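
  // Usage sketch (hypothetical paths; `loadContents` only reads local assets
  // and does not call the LUIS service):
  //
  //   const builder = new Builder((msg: string) => process.stdout.write(msg))
  //   const loaded = await builder.loadContents(['./dialogs/weather.en-us.lu'], 'en-us', 'dev', 'westus')
  //   // loaded.luContents, loaded.recognizers, loaded.multiRecognizers, loaded.settings, ...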
|
||||
|
||||
async build(
|
||||
luContents: any[],
|
||||
recognizers: Map<string, Recognizer>,
|
||||
authoringKey: string,
|
||||
endpoint: string,
|
||||
botName: string,
|
||||
suffix: string,
|
||||
fallbackLocale: string,
|
||||
deleteOldVersion: boolean,
|
||||
isStaging: boolean,
|
||||
multiRecognizers?: Map<string, MultiLanguageRecognizer>,
|
||||
settings?: Settings,
|
||||
crosstrainedRecognizers?: Map<string, CrossTrainedRecognizer>,
|
||||
dialogType?: string,
|
||||
luisAPITPS?: number,
|
||||
timeBucketOfRequests?: number,
|
||||
retryCount?: number,
|
||||
retryDuration?: number) {
|
||||
// luis api TPS which means 5 concurrent transactions to luis api in 1 second
|
||||
// can set to other value if switched to a higher TPS(transaction per second) key
|
||||
let luisApiTps = luisAPITPS || 5
|
||||
|
||||
// set luis call delay duration to 1100 millisecond because 1000 can hit corner case of rate limit
|
||||
let timeBucket = timeBucketOfRequests || 1100
|
||||
|
||||
// set retry count for rate limit luis API failure
|
||||
let countForRetry = retryCount || 1
|
||||
|
||||
// set retry duration for rate limit luis API failure
|
||||
let durationForRetry = retryDuration || 1000
|
||||
|
||||
//default returned recognizer values
|
||||
let recognizerValues: Recognizer[] = []
|
||||
|
||||
let multiRecognizerValues: MultiLanguageRecognizer[] = []
|
||||
|
||||
let settingsValue: any
|
||||
|
||||
let crosstrainedRecognizerValues: CrossTrainedRecognizer[] = []
|
||||
|
||||
// filter if all lu contents are emtty
|
||||
let isAllLuEmpty = fileHelper.isAllFilesSectionEmpty(luContents)
|
||||
|
||||
if (!isAllLuEmpty) {
|
||||
const luBuildCore = new LuBuildCore(authoringKey, endpoint, countForRetry, durationForRetry)
|
||||
const apps = await luBuildCore.getApplicationList()
|
||||
|
||||
// here we do a while loop to make full use of luis tps capacity
|
||||
while (luContents.length > 0) {
|
||||
// get a number(set by luisApiTps) of contents for each loop
|
||||
const subLuContents = luContents.splice(0, luisApiTps)
|
||||
|
||||
// concurrently handle applications
|
||||
await Promise.all(subLuContents.map(async content => {
|
||||
// init current application object from lu content
|
||||
let currentApp = await this.initApplicationFromLuContent(content, botName, suffix)
|
||||
|
||||
// get recognizer
|
||||
let recognizer = recognizers.get(content.name) as Recognizer
|
||||
|
||||
// check whether an app with the same name already exists under the current authoring key
|
||||
if (!recognizer.getAppId()) {
|
||||
for (let app of apps) {
|
||||
if (app.name === currentApp.name) {
|
||||
recognizer.setAppId(app.id)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let needTrainAndPublish = false
|
||||
|
||||
// compare models and update the model if a match is found
|
||||
// otherwise create a new application
|
||||
if (recognizer.getAppId() && recognizer.getAppId() !== '') {
|
||||
// check whether the model needs an update
|
||||
needTrainAndPublish = await this.updateApplication(currentApp, luBuildCore, recognizer, timeBucket, deleteOldVersion)
|
||||
} else {
|
||||
// create a new application
|
||||
needTrainAndPublish = await this.createApplication(currentApp, luBuildCore, recognizer, timeBucket)
|
||||
}
|
||||
|
||||
if (needTrainAndPublish) {
|
||||
// train and publish application
|
||||
await this.trainAndPublishApplication(luBuildCore, recognizer, timeBucket, isStaging)
|
||||
}
|
||||
|
||||
// update multiLanguageRecognizer asset
|
||||
if (multiRecognizers && multiRecognizers.has(content.id)) {
|
||||
let multiRecognizer = multiRecognizers.get(content.id) as MultiLanguageRecognizer
|
||||
multiRecognizer.recognizers[currentApp.culture] = path.basename(recognizer.getDialogPath(), '.dialog')
|
||||
if (currentApp.culture.toLowerCase() === fallbackLocale.toLowerCase()) {
|
||||
multiRecognizer.recognizers[''] = path.basename(recognizer.getDialogPath(), '.dialog')
|
||||
}
|
||||
}
|
||||
|
||||
if (crosstrainedRecognizers && crosstrainedRecognizers.has(content.id)) {
|
||||
let crosstrainedRecognizer = crosstrainedRecognizers.get(content.id) as CrossTrainedRecognizer
|
||||
if (!crosstrainedRecognizer.recognizers.includes(content.id + '.lu')) {
|
||||
crosstrainedRecognizer.recognizers.push(content.id + '.lu')
|
||||
}
|
||||
}
|
||||
|
||||
// update settings asset
|
||||
if (settings) {
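// for illustration, a content named "my-bot.en-us" yields the settings key
// "my_bot_en_us": dots and dashes in the name are folded to underscores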
|
||||
settings.luis[content.name.split('.').join('_').replace(/-/g, '_')] = recognizer.getAppId()
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
// write dialog assets
|
||||
if (recognizers) {
|
||||
recognizerValues = Array.from(recognizers.values())
|
||||
}
|
||||
|
||||
if (multiRecognizers) {
|
||||
multiRecognizerValues = Array.from(multiRecognizers.values())
|
||||
}
|
||||
|
||||
if (settings) {
|
||||
settingsValue = settings as Settings
|
||||
}
|
||||
}
|
||||
|
||||
if (dialogType === recognizerType.CROSSTRAINED && crosstrainedRecognizers) {
|
||||
crosstrainedRecognizerValues = Array.from(crosstrainedRecognizers.values())
|
||||
}
|
||||
|
||||
const dialogContents = this.generateDeclarativeAssets(recognizerValues, multiRecognizerValues, settingsValue, crosstrainedRecognizerValues)
|
||||
|
||||
return dialogContents
|
||||
}
|
||||
|
||||
async writeDialogAssets(contents: any[], force: boolean, out: string, luconfig: string) {
|
||||
let writeDone = false
|
||||
|
||||
let writeContents = contents.filter(c => c.id.endsWith('.dialog'))
|
||||
let settingsContents = contents.filter(c => c.id.endsWith('.json'))
|
||||
|
||||
if (settingsContents && settingsContents.length > 0) {
|
||||
let outPath
|
||||
if (luconfig) {
|
||||
outPath = path.join(path.resolve(path.dirname(luconfig)), settingsContents[0].id)
|
||||
} else if (out) {
|
||||
outPath = path.join(path.resolve(out), settingsContents[0].id)
|
||||
} else {
|
||||
outPath = path.resolve(settingsContents[0].id)
|
||||
}
|
||||
writeContents.push(this.mergeSettingsContent(outPath, settingsContents))
|
||||
}
|
||||
|
||||
for (const content of writeContents) {
|
||||
let outFilePath
|
||||
if (out) {
|
||||
outFilePath = path.join(path.resolve(out), path.basename(content.path))
|
||||
} else {
|
||||
outFilePath = content.path
|
||||
}
|
||||
|
||||
let fileExists = fs.existsSync(outFilePath)
|
||||
if (fileExists && outFilePath.endsWith('.lu.qna.dialog')) {
|
||||
let existingCTRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(outFilePath))
|
||||
let currentCTRecognizerObject = JSON.parse(content.content)
|
||||
let ctRecognizerToBeMerged = currentCTRecognizerObject.recognizers.filter((r: string) => !existingCTRecognizerObject.recognizers.includes(r))
|
||||
existingCTRecognizerObject.recognizers = existingCTRecognizerObject.recognizers.concat(ctRecognizerToBeMerged)
|
||||
content.content = JSON.stringify(existingCTRecognizerObject, null, 4)
|
||||
}
|
||||
|
||||
if (force || !fs.existsSync(outFilePath)) {
|
||||
if (!fs.existsSync(path.dirname(outFilePath))) {
|
||||
fs.mkdirSync(path.dirname(outFilePath))
|
||||
}
|
||||
|
||||
this.handler(`Writing to ${outFilePath}\n`)
|
||||
await fs.writeFile(outFilePath, content.content, 'utf-8')
|
||||
writeDone = true
|
||||
}
|
||||
}
|
||||
|
||||
return writeDone
|
||||
}
|
||||
|
||||
async getActiveVersionIds(appNames: string[], authoringKey: string, region: string, retryCount?: number, retryDuration?: number) {
|
||||
const luBuildCore = new LuBuildCore(authoringKey, `https://${region}.api.cognitive.microsoft.com`, retryCount || 1, retryDuration || 1000)
|
||||
const apps = await luBuildCore.getApplicationList()
|
||||
let appNameVersionMap = new Map<string, string>()
|
||||
for (const appName of appNames) {
|
||||
// check whether an app with the same name already exists under the current authoring key
|
||||
appNameVersionMap.set(appName, '')
|
||||
for (let app of apps) {
|
||||
if (app.name === appName) {
|
||||
const appInfo = await luBuildCore.getApplicationInfo(app.id)
|
||||
appNameVersionMap.set(appName, appInfo.activeVersion)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return appNameVersionMap
|
||||
}
|
||||
|
||||
async initApplicationFromLuContent(content: any, botName: string, suffix: string) {
|
||||
let currentApp = await LuisBuilder.fromLUAsync([content]) // content.parseToLuis(true, content.language)
|
||||
currentApp.culture = currentApp.culture && currentApp.culture !== '' && currentApp.culture !== 'en-us' ? currentApp.culture : content.language as string
|
||||
currentApp.desc = currentApp.desc && currentApp.desc !== '' ? currentApp.desc : `Model for ${botName} app, targeting ${suffix}`
|
||||
|
||||
if (currentApp.name === undefined || currentApp.name === '') {
|
||||
currentApp.name = `${botName}(${suffix})-${content.name}`
|
||||
}
|
||||
|
||||
// remove empty intents from the current app to avoid the fewLabels error when training
|
||||
this.filterEmptyIntents(currentApp)
|
||||
|
||||
return currentApp
|
||||
}
|
||||
|
||||
async updateApplication(currentApp: any, luBuildCore: LuBuildCore, recognizer: Recognizer, timeBucket: number, deleteOldVersion: boolean) {
|
||||
await delay(timeBucket)
|
||||
const appInfo = await luBuildCore.getApplicationInfo(recognizer.getAppId())
|
||||
recognizer.versionId = appInfo.activeVersion || appInfo.endpoints.PRODUCTION.versionId
|
||||
|
||||
await delay(timeBucket)
|
||||
const existingApp = await luBuildCore.exportApplication(recognizer.getAppId(), recognizer.versionId)
|
||||
|
||||
// compare models
|
||||
const needUpdate = luBuildCore.compareApplications(currentApp, existingApp)
|
||||
if (needUpdate) {
|
||||
const newVersionId = luBuildCore.updateVersion(currentApp, existingApp)
|
||||
recognizer.versionId = newVersionId
|
||||
const options: any = {
|
||||
versionId: newVersionId
|
||||
}
|
||||
|
||||
this.handler(`${recognizer.getLuPath()} creating version=${newVersionId}\n`)
|
||||
await delay(timeBucket)
|
||||
await luBuildCore.importNewVersion(recognizer.getAppId(), currentApp, options)
|
||||
|
||||
if (deleteOldVersion) {
|
||||
await delay(timeBucket)
|
||||
const versionObjs = await luBuildCore.listApplicationVersions(recognizer.getAppId())
|
||||
for (const versionObj of versionObjs) {
|
||||
if (versionObj.version !== newVersionId) {
|
||||
this.handler(`${recognizer.getLuPath()} deleting old version=${versionObj.version}`)
|
||||
await delay(timeBucket)
|
||||
await luBuildCore.deleteVersion(recognizer.getAppId(), versionObj.version)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
} else {
|
||||
this.handler(`${recognizer.getLuPath()} no changes\n`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async createApplication(currentApp: any, luBuildCore: LuBuildCore, recognizer: Recognizer, timeBucket: number) {
|
||||
currentApp.versionId = currentApp.versionId && currentApp.versionId !== '' ? currentApp.versionId : '0.1'
|
||||
recognizer.versionId = currentApp.versionId
|
||||
this.handler(`Creating LUIS.ai application: ${currentApp.name} version:${currentApp.versionId}\n`)
|
||||
await delay(timeBucket)
|
||||
const response = await luBuildCore.importApplication(currentApp)
|
||||
recognizer.setAppId(typeof response === 'string' ? response : response[Object.keys(response)[0]])
|
||||
return true
|
||||
}
|
||||
|
||||
async trainAndPublishApplication(luBuildCore: LuBuildCore, recognizer: Recognizer, timeBucket: number, isStaging: boolean) {
|
||||
// send train application request
|
||||
this.handler(`${recognizer.getLuPath()} training version=${recognizer.versionId}\n`)
|
||||
await delay(timeBucket)
|
||||
await luBuildCore.trainApplication(recognizer.getAppId(), recognizer.versionId)
|
||||
this.handler(`${recognizer.getLuPath()} waiting for training for version=${recognizer.versionId}...\n`)
|
||||
let done = true
|
||||
do {
|
||||
await delay(timeBucket)
|
||||
|
||||
// get training status to see if training completed
|
||||
let trainingStatus = await luBuildCore.getTrainingStatus(recognizer.getAppId(), recognizer.versionId)
|
||||
done = true
|
||||
for (let status of trainingStatus) {
|
||||
if (status.details) {
|
||||
if (status.details.status === 'InProgress' || status.details.status === 'Queued') {
|
||||
done = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
} while (!done)
|
||||
this.handler('done\n')
|
||||
|
||||
// publish the application
|
||||
this.handler(`${recognizer.getLuPath()} publishing version=${recognizer.versionId}\n`)
|
||||
await delay(timeBucket)
|
||||
await luBuildCore.publishApplication(recognizer.getAppId(), recognizer.versionId, isStaging)
|
||||
this.handler(`${recognizer.getLuPath()} publishing finished for ${isStaging ? 'Staging' : 'Production'} slot\n`)
|
||||
}
|
||||
|
||||
generateDeclarativeAssets(recognizers: Array<Recognizer>, multiRecognizers: Array<MultiLanguageRecognizer>, settings: Settings, crosstrainedRecognizers: Array<CrossTrainedRecognizer>)
|
||||
: Array<any> {
|
||||
let contents = new Array<any>()
|
||||
for (const recognizer of recognizers) {
|
||||
let content = new Content(recognizer.save(), new LUOptions(path.basename(recognizer.getDialogPath()), true, '', recognizer.getDialogPath()))
|
||||
contents.push(content)
|
||||
}
|
||||
|
||||
for (const multiRecognizer of multiRecognizers) {
|
||||
const multiLangContent = new Content(multiRecognizer.save(), new LUOptions(path.basename(multiRecognizer.getDialogPath()), true, '', multiRecognizer.getDialogPath()))
|
||||
contents.push(multiLangContent)
|
||||
}
|
||||
|
||||
if (settings) {
|
||||
const settingsContent = new Content(settings.save(), new LUOptions(path.basename(settings.getSettingsPath()), true, '', settings.getSettingsPath()))
|
||||
contents.push(settingsContent)
|
||||
}
|
||||
|
||||
for (const crosstrainedRecognizer of crosstrainedRecognizers) {
|
||||
const crosstrainedContent = new Content(crosstrainedRecognizer.save(), new LUOptions(path.basename(crosstrainedRecognizer.getDialogPath()), true, '', crosstrainedRecognizer.getDialogPath()))
|
||||
contents.push(crosstrainedContent)
|
||||
}
|
||||
|
||||
return contents
|
||||
}
|
||||
|
||||
mergeSettingsContent(settingsPath: string, contents: any[]) {
|
||||
let settings = new Settings(settingsPath, {})
|
||||
for (const content of contents) {
|
||||
const luisAppsMap = JSON.parse(content.content).luis
|
||||
for (const appName of Object.keys(luisAppsMap)) {
|
||||
settings.luis[appName] = luisAppsMap[appName]
|
||||
}
|
||||
}
|
||||
|
||||
return new Content(settings.save(), new LUOptions(path.basename(settings.getSettingsPath()), true, '', settings.getSettingsPath()))
|
||||
}
|
||||
|
||||
filterEmptyIntents(app: any) {
|
||||
const intents = app.intents
|
||||
const utterances = app.utterances
|
||||
const patterns = app.patterns
|
||||
|
||||
const emptyIntents = intents.filter((intent: any) => !utterances.some((utterance: any) => utterance.intent === intent.name)
|
||||
&& !patterns.some((pattern: any) => pattern.intent === intent.name))
|
||||
|
||||
if (emptyIntents && emptyIntents.length > 0) {
|
||||
const filteredIntents = intents.filter((intent: any) => !emptyIntents.some((emptyIntent: any) => emptyIntent.name === intent.name))
|
||||
this.handler(`[WARN]: empty intent(s) ${emptyIntents.map((intent: any) => '# ' + intent.name).join(', ')} are filtered when handling luis application`)
|
||||
app.intents = filteredIntents
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,427 @@
|
|||
/*!
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
|
||||
import {CognitiveServicesCredentials} from '@azure/ms-rest-azure-js'
|
||||
import {LUISAuthoringClient} from '@azure/cognitiveservices-luis-authoring'
|
||||
import fetch from 'node-fetch'
|
||||
|
||||
const delay = require('delay')
|
||||
const retCode = require('./../utils/enums/CLI-errors')
|
||||
const exception = require('./../utils/exception')
|
||||
const Luis = require('./../luis/luis')
|
||||
|
||||
const rateLimitErrorCode = 429
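// every API call below follows the same pattern: retry only while the failure is an
// HTTP 429 (rate limit), waiting retryDuration between attempts, and rethrow any other
// error immediately; retryCount + 1 makes the configured count mean additional retries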
|
||||
|
||||
export class LuBuildCore {
|
||||
private readonly client: any
|
||||
private readonly subscriptionKey: string
|
||||
private readonly endpoint: string
|
||||
private readonly retryCount: number
|
||||
private readonly retryDuration: number
|
||||
|
||||
constructor(subscriptionKey: string, endpoint: string, retryCount: number, retryDuration: number) {
|
||||
this.subscriptionKey = subscriptionKey
|
||||
this.endpoint = endpoint
|
||||
this.retryCount = retryCount
|
||||
this.retryDuration = retryDuration
|
||||
|
||||
// new luis api client
|
||||
const creds = new CognitiveServicesCredentials(subscriptionKey)
|
||||
this.client = new LUISAuthoringClient(creds, endpoint)
|
||||
}
|
||||
|
||||
public async getApplicationList() {
|
||||
let apps
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
apps = await this.client.apps.list(undefined, undefined)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
|
||||
return apps
|
||||
}
|
||||
|
||||
public async getApplicationInfo(appId: string) {
|
||||
let appInfo
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
appInfo = await this.client.apps.get(appId)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
|
||||
return appInfo
|
||||
}
|
||||
|
||||
public async importApplication(currentApp: any): Promise<any> {
|
||||
// let response = await this.client.apps.importMethod(currentApp)
|
||||
|
||||
const name = `?appName=${currentApp.name}`
|
||||
const url = this.endpoint + '/luis/authoring/v3.0-preview/apps/import' + name
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'Ocp-Apim-Subscription-Key': this.subscriptionKey
|
||||
}
|
||||
|
||||
let messageData
|
||||
let retryCount = this.retryCount + 1
|
||||
let error: any
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.code === rateLimitErrorCode.toString()) {
|
||||
let response = await fetch(url, {method: 'POST', headers, body: JSON.stringify(currentApp)})
|
||||
messageData = await response.json()
|
||||
|
||||
if (messageData.error === undefined) break
|
||||
|
||||
error = messageData.error
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
} else {
|
||||
throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, error.message))
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, error.message))
|
||||
}
|
||||
|
||||
return messageData
|
||||
}
|
||||
|
||||
public async exportApplication(appId: string, versionId: string) {
|
||||
const url = this.endpoint + '/luis/authoring/v3.0-preview/apps/' + appId + '/versions/' + versionId + '/export?format=json'
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'Ocp-Apim-Subscription-Key': this.subscriptionKey
|
||||
}
|
||||
|
||||
let messageData
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
const response = await fetch(url, {method: 'GET', headers})
|
||||
messageData = await response.json()
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
|
||||
if (messageData.error) {
|
||||
throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, messageData.error.message))
|
||||
}
|
||||
|
||||
return messageData
|
||||
}
|
||||
|
||||
public compareApplications(currentApp: any, existingApp: any) {
|
||||
currentApp.desc = currentApp.desc && currentApp.desc !== '' && currentApp.desc !== existingApp.desc ? currentApp.desc : existingApp.desc
|
||||
currentApp.culture = currentApp.culture && currentApp.culture !== '' && currentApp.culture !== existingApp.culture ? currentApp.culture : existingApp.culture
|
||||
currentApp.versionId = currentApp.versionId && currentApp.versionId !== '' && currentApp.versionId > existingApp.versionId ? currentApp.versionId : existingApp.versionId
|
||||
currentApp.name = existingApp.name
|
||||
|
||||
let currentAppToCompare = JSON.parse(JSON.stringify(currentApp));
|
||||
|
||||
// normalize list entities: drop any synonym in a sublist that duplicates its canonicalForm
|
||||
(currentAppToCompare.closedLists || []).forEach((c: any) => {
|
||||
(c.subLists || []).forEach((s: any) => {
|
||||
if (s.list) {
|
||||
const foundIndex = s.list.indexOf(s.canonicalForm)
|
||||
if (foundIndex > -1) {
|
||||
s.list.splice(foundIndex, 1)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// skip comparisons of properties that the LUIS API automatically adds or updates
|
||||
currentAppToCompare.luis_schema_version = existingApp.luis_schema_version
|
||||
currentAppToCompare.tokenizerVersion = existingApp.tokenizerVersion
|
||||
currentAppToCompare.settings = existingApp.settings
|
||||
|
||||
// skip the None intent during comparison when it was automatically added by the LUIS server
|
||||
if (currentAppToCompare.intents && !currentAppToCompare.intents.some((x: any) => x.name === 'None')) {
|
||||
const existingNoneIntentIndex = existingApp.intents.findIndex((x: any) => x.name === 'None')
|
||||
if (existingNoneIntentIndex > -1) existingApp.intents.splice(existingNoneIntentIndex, 1)
|
||||
}
|
||||
|
||||
// compare lu contents converted from luis json
|
||||
const isApplicationEqual = this.isApplicationEqual(currentAppToCompare, existingApp)
|
||||
|
||||
return !isApplicationEqual
|
||||
}
|
||||
|
||||
public updateVersion(currentApp: any, existingApp: any) {
|
||||
let newVersionId: string
|
||||
if (currentApp.versionId > existingApp.versionId) {
|
||||
newVersionId = currentApp.versionId
|
||||
} else {
|
||||
newVersionId = this.updateVersionValue(existingApp.versionId)
|
||||
}
|
||||
|
||||
currentApp.versionId = newVersionId
|
||||
|
||||
return newVersionId
|
||||
}
|
||||
|
||||
public async importNewVersion(appId: string, app: any, options: any) {
|
||||
// await this.client.versions.importMethod(appId, app, options)
|
||||
|
||||
const versionId = `?versionId=${options.versionId}`
|
||||
let url = this.endpoint + '/luis/authoring/v3.0-preview/apps/' + appId + '/versions/import' + versionId
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'Ocp-Apim-Subscription-Key': this.subscriptionKey
|
||||
}
|
||||
|
||||
let messageData
|
||||
let retryCount = this.retryCount + 1
|
||||
let error: any
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.code === rateLimitErrorCode.toString()) {
|
||||
let response = await fetch(url, {method: 'POST', headers, body: JSON.stringify(app)})
|
||||
messageData = await response.json()
|
||||
|
||||
if (messageData.error === undefined) break
|
||||
|
||||
error = messageData.error
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
} else {
|
||||
throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, error.message))
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, error.message))
|
||||
}
|
||||
|
||||
return messageData
|
||||
}
|
||||
|
||||
public async listApplicationVersions(appId: string) {
|
||||
let appVersions
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
appVersions = await this.client.versions.list(appId)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
|
||||
return appVersions
|
||||
}
|
||||
|
||||
public async deleteVersion(appId: string, versionId: string) {
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
await this.client.versions.deleteMethod(appId, versionId)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public async trainApplication(appId: string, versionId: string) {
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
await this.client.train.trainVersion(appId, versionId)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public async getTrainingStatus(appId: string, versionId: string) {
|
||||
let status
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
status = await this.client.train.getStatus(appId, versionId)
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
|
||||
return status
|
||||
}
|
||||
|
||||
public async publishApplication(appId: string, versionId: string, isStaging: boolean) {
|
||||
let retryCount = this.retryCount + 1
|
||||
let error
|
||||
while (retryCount > 0) {
|
||||
if (error === undefined || error.statusCode === rateLimitErrorCode) {
|
||||
try {
|
||||
await this.client.apps.publish(appId,
|
||||
{
|
||||
versionId,
|
||||
isStaging
|
||||
})
|
||||
break
|
||||
} catch (e) {
|
||||
error = e
|
||||
retryCount--
|
||||
if (retryCount > 0) await delay(this.retryDuration)
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (retryCount === 0) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
private updateVersionValue(versionId: string) {
|
||||
let numberVersionId = parseFloat(versionId)
|
||||
if (isNaN(numberVersionId)) {
|
||||
const index = versionId.lastIndexOf('-')
|
||||
if (index > 0) {
|
||||
const strVersion = versionId.substring(0, index)
|
||||
const numberVersion = versionId.substring(index + 1)
|
||||
numberVersionId = parseFloat(numberVersion)
|
||||
if (isNaN(numberVersionId)) {
|
||||
return versionId
|
||||
} else {
|
||||
const newVersionId = numberVersionId + 0.1
|
||||
|
||||
return strVersion + '-' + newVersionId.toFixed(1)
|
||||
}
|
||||
} else {
|
||||
return versionId + '-0.1'
|
||||
}
|
||||
} else {
|
||||
return (numberVersionId + 0.1).toFixed(1)
|
||||
}
|
||||
}
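// illustrative inputs and outputs, derived from the branches above:
//   '0.1'    -> '0.2'
//   '1.9'    -> '2.0'
//   'v2-0.3' -> 'v2-0.4'
//   'patch'  -> 'patch-0.1'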
|
||||
|
||||
private isApplicationEqual(appA: any, appB: any) {
|
||||
let appALuis = new Luis(appA)
|
||||
this.sortLuis(appALuis)
|
||||
let appALu = appALuis.parseToLuContent().toLowerCase()
|
||||
|
||||
let appBLuis = new Luis(appB)
|
||||
this.sortLuis(appBLuis)
|
||||
let appBLu = appBLuis.parseToLuContent().toLowerCase()
|
||||
|
||||
return appALu === appBLu
|
||||
}
|
||||
|
||||
private sortLuis(app: any) {
|
||||
this.sortProperty(app.intents, 'name')
|
||||
this.sortProperty(app.closedLists, 'name')
|
||||
this.sortProperty(app.composites, 'name')
|
||||
this.sortProperty(app.entities, 'name')
|
||||
this.sortProperty(app.model_features, 'name')
|
||||
this.sortProperty(app.phraselists, 'name')
|
||||
this.sortProperty(app.patternAnyEntities, 'name')
|
||||
this.sortProperty(app.patterns, 'pattern')
|
||||
this.sortProperty(app.prebuiltEntities, 'name')
|
||||
this.sortProperty(app.regex_entities, 'name')
|
||||
this.sortProperty(app.regexEntities, 'name')
|
||||
this.sortProperty(app.utterances, 'text')
|
||||
}
|
||||
|
||||
private sortProperty(arrayToSort: any[], propertyToSort: string) {
|
||||
(arrayToSort || []).sort((a: any, b: any) => {
|
||||
const aValue = a[propertyToSort].toLowerCase()
|
||||
const bValue = b[propertyToSort].toLowerCase()
|
||||
|
||||
return aValue < bValue ? -1 : aValue > bValue ? 1 : 0
|
||||
})
|
||||
}
|
||||
}
|
|
@ -0,0 +1,33 @@
|
|||
/*!
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
|
||||
export class CrossTrainedRecognizer {
|
||||
public recognizers: any
|
||||
private readonly dialogPath: string
|
||||
private readonly $schema: string | undefined
|
||||
|
||||
constructor(dialogPath: string, recognizers: any, schema?: string) {
|
||||
this.dialogPath = dialogPath
|
||||
this.recognizers = recognizers
|
||||
this.$schema = schema
|
||||
}
|
||||
|
||||
save(): string {
|
||||
let output: any = {
|
||||
$kind: 'Microsoft.CrossTrainedRecognizerSet',
|
||||
recognizers: this.recognizers
|
||||
}
|
||||
|
||||
if (this.$schema) {
|
||||
output = {$schema: this.$schema, ...output}
|
||||
}
|
||||
|
||||
return JSON.stringify(output, null, 4)
|
||||
}
|
||||
|
||||
getDialogPath(): string {
|
||||
return this.dialogPath
|
||||
}
|
||||
}
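// a minimal usage sketch (the file name and recognizer ids are illustrative):
//   new CrossTrainedRecognizer('main.lu.qna.dialog', ['main.lu', 'main.qna']).save()
// returns, condensed to one line:
//   { "$kind": "Microsoft.CrossTrainedRecognizerSet", "recognizers": ["main.lu", "main.qna"] }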
|
|
@ -0,0 +1,38 @@
|
|||
/*!
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
|
||||
const path = require('path')
|
||||
|
||||
export class MultiLanguageRecognizer {
|
||||
public recognizers: any
|
||||
private readonly id: string
|
||||
private readonly dialogPath: string
|
||||
private readonly $schema: string | undefined
|
||||
|
||||
constructor(dialogPath: string, recognizers: any, schema?: string) {
|
||||
this.id = `LUIS_${path.basename(dialogPath).split('.')[0]}`
|
||||
this.dialogPath = dialogPath
|
||||
this.recognizers = recognizers
|
||||
this.$schema = schema
|
||||
}
|
||||
|
||||
save(): string {
|
||||
let output: any = {
|
||||
$kind: 'Microsoft.MultiLanguageRecognizer',
|
||||
id: this.id,
|
||||
recognizers: this.recognizers
|
||||
}
|
||||
|
||||
if (this.$schema) {
|
||||
output = {$schema: this.$schema, ...output}
|
||||
}
|
||||
|
||||
return JSON.stringify(output, null, 4)
|
||||
}
|
||||
|
||||
getDialogPath(): string {
|
||||
return this.dialogPath
|
||||
}
|
||||
}
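// a minimal sketch (paths and locales are illustrative):
//   new MultiLanguageRecognizer('main.lu.dialog', { 'en-us': 'main.en-us.lu', '': 'main.en-us.lu' }).save()
// emits a Microsoft.MultiLanguageRecognizer with id "LUIS_main"; the '' key is the
// fallback-locale entry that the builder writes for the configured fallbackLocale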
|
|
@ -0,0 +1,76 @@
|
|||
/*!
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
|
||||
import {Settings} from './settings'
|
||||
import * as path from 'path'
|
||||
|
||||
export class Recognizer {
|
||||
static load(luFile: string, targetFileName: string, dialogPath: string, luisSettings: Settings, existingRecognizer: any, schema?: string): Recognizer {
|
||||
if (existingRecognizer) {
|
||||
let recognizer = new Recognizer(luFile, targetFileName, schema)
|
||||
recognizer.dialogPath = dialogPath
|
||||
Object.assign(recognizer, existingRecognizer)
|
||||
recognizer.setAppId(luisSettings.luis[path.basename(luFile).split('.').join('_')])
|
||||
|
||||
return recognizer
|
||||
}
|
||||
|
||||
let recognizer = new Recognizer(luFile, targetFileName, schema)
|
||||
recognizer.dialogPath = dialogPath
|
||||
|
||||
return recognizer
|
||||
}
|
||||
|
||||
versionId: string
|
||||
private readonly id: string
|
||||
private readonly applicationId: string | undefined
|
||||
private readonly endpoint: string | undefined
|
||||
private readonly endpointKey: string | undefined
|
||||
private readonly $schema: string | undefined
|
||||
private appId: string
|
||||
private dialogPath: string | undefined
|
||||
|
||||
constructor(private readonly luFile: string, targetFileName: string, schema?: string) {
|
||||
this.appId = ''
|
||||
this.id = `LUIS_${targetFileName.split('.')[0]}`
|
||||
this.applicationId = `=settings.luis.${targetFileName.split('.').join('_').replace(/-/g, '_')}`
|
||||
this.endpoint = '=settings.luis.endpoint'
|
||||
this.endpointKey = '=settings.luis.endpointKey'
|
||||
this.versionId = '0.1'
|
||||
this.$schema = schema
|
||||
}
|
||||
|
||||
save(): string {
|
||||
let output: any = {
|
||||
$kind: 'Microsoft.LuisRecognizer',
|
||||
id: this.id,
|
||||
applicationId: this.applicationId,
|
||||
endpoint: this.endpoint,
|
||||
endpointKey: this.endpointKey
|
||||
}
|
||||
|
||||
if (this.$schema) {
|
||||
output = {$schema: this.$schema, ...output}
|
||||
}
|
||||
|
||||
return JSON.stringify(output, null, 4)
|
||||
}
|
||||
|
||||
getAppId(): string {
|
||||
return this.appId
|
||||
}
|
||||
|
||||
setAppId(appId: string) {
|
||||
this.appId = appId
|
||||
}
|
||||
|
||||
getDialogPath(): string {
|
||||
return this.dialogPath as string
|
||||
}
|
||||
|
||||
getLuPath() {
|
||||
return this.luFile
|
||||
}
|
||||
}
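// a minimal sketch (the file name is illustrative): for targetFileName "my-bot.en-us.lu",
// save() emits a Microsoft.LuisRecognizer with
//   id:            "LUIS_my-bot"                     (text before the first '.')
//   applicationId: "=settings.luis.my_bot_en_us_lu"  (dots and dashes folded to '_')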
|
|
@ -0,0 +1,26 @@
|
|||
/*!
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
|
||||
export class Settings {
|
||||
public luis: any
|
||||
private readonly settingsPath: string
|
||||
|
||||
constructor(settingsPath: string, luis: any) {
|
||||
this.settingsPath = settingsPath
|
||||
this.luis = luis
|
||||
}
|
||||
|
||||
save(): string {
|
||||
let output = {
|
||||
luis: this.luis
|
||||
}
|
||||
|
||||
return JSON.stringify(output, null, 4)
|
||||
}
|
||||
|
||||
getSettingsPath(): string {
|
||||
return this.settingsPath
|
||||
}
|
||||
}
|
|
@ -0,0 +1,231 @@
|
|||
lexer grammar LUFileLexer;
|
||||
|
||||
// fragments
|
||||
fragment A: 'a' | 'A';
|
||||
fragment B: 'b' | 'B';
|
||||
fragment C: 'c' | 'C';
|
||||
fragment D: 'd' | 'D';
|
||||
fragment E: 'e' | 'E';
|
||||
fragment F: 'f' | 'F';
|
||||
fragment G: 'g' | 'G';
|
||||
fragment H: 'h' | 'H';
|
||||
fragment I: 'i' | 'I';
|
||||
fragment J: 'j' | 'J';
|
||||
fragment K: 'k' | 'K';
|
||||
fragment L: 'l' | 'L';
|
||||
fragment M: 'm' | 'M';
|
||||
fragment N: 'n' | 'N';
|
||||
fragment O: 'o' | 'O';
|
||||
fragment P: 'p' | 'P';
|
||||
fragment Q: 'q' | 'Q';
|
||||
fragment R: 'r' | 'R';
|
||||
fragment S: 's' | 'S';
|
||||
fragment T: 't' | 'T';
|
||||
fragment U: 'u' | 'U';
|
||||
fragment V: 'v' | 'V';
|
||||
fragment W: 'w' | 'W';
|
||||
fragment X: 'x' | 'X';
|
||||
fragment Y: 'y' | 'Y';
|
||||
fragment Z: 'z' | 'Z';
|
||||
|
||||
fragment LETTER: 'a'..'z' | 'A'..'Z';
|
||||
fragment NUMBER: '0'..'9';
|
||||
|
||||
fragment WHITESPACE
|
||||
: ' '|'\t'|'\ufeff'|'\u00a0'
|
||||
;
|
||||
|
||||
fragment UTTERANCE_MARK: '-' | '*' | '+';
|
||||
|
||||
WS
|
||||
: WHITESPACE+
|
||||
;
|
||||
|
||||
NEWLINE
|
||||
: '\r'? '\n' -> skip
|
||||
;
|
||||
|
||||
QNA_SOURCE_INFO
|
||||
: WS* '>' WS* '!# @qna.pair.source' WS* '=' ~('\r'|'\n')+
|
||||
;
|
||||
|
||||
MODEL_INFO
|
||||
: WS* '>' WS* '!#' ~('\r'|'\n')+
|
||||
;
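// e.g. "> !# @app.name = myBot" is lexed as MODEL_INFO, while the more specific
// "> !# @qna.pair.source = editorial" is matched by QNA_SOURCE_INFO above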
|
||||
|
||||
COMMENT
|
||||
: WS* '>' ~('\r'|'\n')* -> skip
|
||||
;
|
||||
|
||||
QNA
|
||||
: '#'+ WS* '?' -> pushMode(QNA_MODE)
|
||||
;
|
||||
|
||||
HASH
|
||||
: '#' -> pushMode(INTENT_NAME_MODE)
|
||||
;
|
||||
|
||||
DASH
|
||||
: UTTERANCE_MARK -> pushMode(LIST_BODY_MODE)
|
||||
;
|
||||
|
||||
DOLLAR
|
||||
: '$' -> pushMode(ENTITY_MODE)
|
||||
;
|
||||
|
||||
AT
|
||||
: '@' -> pushMode(NEW_ENTITY_MODE)
|
||||
;
|
||||
|
||||
IMPORT
|
||||
: '[' ~[\r\n[\]]*? ']' '(' ~[\r\n()]*? ')'
|
||||
;
|
||||
|
||||
FILTER_MARK
|
||||
: '**' F I L T E R S ':**'
|
||||
;
|
||||
|
||||
QNA_ID_MARK
|
||||
: '<a' .*? '</a>'
|
||||
;
|
||||
|
||||
MULTI_LINE_TEXT
|
||||
: '```' .*? '```'
|
||||
;
|
||||
PROMPT_MARK
|
||||
: '**' P R O M P T S ':**'
|
||||
;
|
||||
|
||||
INVALID_TOKEN_DEFAULT_MODE
|
||||
: .
|
||||
;
|
||||
|
||||
mode NEW_ENTITY_MODE;
|
||||
|
||||
WS_IN_NEW_ENTITY
|
||||
: WS -> type(WS)
|
||||
;
|
||||
|
||||
NEWLINE_IN_NEW_ENTITY
|
||||
: '\r'? '\n' -> type(NEWLINE), popMode
|
||||
;
|
||||
|
||||
EQUAL
|
||||
: '='
|
||||
;
|
||||
|
||||
COMMA
|
||||
: ','
|
||||
;
|
||||
|
||||
HAS_ROLES_LABEL
|
||||
: H A S R O L E S?
|
||||
;
|
||||
|
||||
HAS_FEATURES_LABEL
|
||||
: U S E S F E A T U R E S?
|
||||
;
|
||||
|
||||
NEW_ENTITY_TYPE_IDENTIFIER
|
||||
: S I M P L E | L I S T | R E G E X | P R E B U I L T | C O M P O S I T E | M L | P A T T E R N A N Y | P H R A S E L I S T | I N T E N T
|
||||
;
|
||||
|
||||
PHRASE_LIST_LABEL
|
||||
: '(' (~[\r\n])* ')'
|
||||
;
|
||||
|
||||
NEW_COMPOSITE_ENTITY
|
||||
: '[' (~[\r\n{}[\]()])* ']'
|
||||
;
|
||||
|
||||
NEW_REGEX_ENTITY
|
||||
: '/' (~[\r\n])*
|
||||
;
|
||||
|
||||
NEW_ENTITY_IDENTIFIER
|
||||
: (~[ \t\r\n,;'"])+
|
||||
;
|
||||
|
||||
NEW_ENTITY_IDENTIFIER_WITH_WS
|
||||
: ('\'' | '"') (~[\t\r\n,;'"])+ ('\'' | '"')
|
||||
;
|
||||
|
||||
mode INTENT_NAME_MODE;
|
||||
|
||||
WS_IN_NAME
|
||||
: WS -> type(WS)
|
||||
;
|
||||
|
||||
HASH_IN_NAME
|
||||
: '#' -> type(HASH)
|
||||
;
|
||||
|
||||
NEWLINE_IN_NAME
|
||||
: '\r'? '\n' -> skip, popMode
|
||||
;
|
||||
|
||||
IDENTIFIER
|
||||
: (LETTER | NUMBER | '_') (LETTER | NUMBER | '-' | '_')*
|
||||
;
|
||||
|
||||
DOT
|
||||
: '.'
|
||||
;
|
||||
|
||||
mode LIST_BODY_MODE;
|
||||
|
||||
WS_IN_LIST_BODY
|
||||
: WS -> type(WS)
|
||||
;
|
||||
|
||||
NEWLINE_IN_LIST_BODY
|
||||
: '\r'? '\n' -> type(NEWLINE), popMode
|
||||
;
|
||||
|
||||
ESCAPE_CHARACTER
|
||||
: '\\' ~[\r\n]?
|
||||
;
|
||||
|
||||
EXPRESSION
|
||||
: '{' (~[\r\n{}] | ('{' ~[\r\n]* '}'))* '}'
|
||||
;
|
||||
|
||||
TEXT
|
||||
: ~[ \t\r\n\\]+?
|
||||
;
|
||||
|
||||
mode ENTITY_MODE;
|
||||
|
||||
WS_IN_ENTITY
|
||||
: WS -> type(WS)
|
||||
;
|
||||
|
||||
NEWLINE_IN_ENTITY
|
||||
: '\r'? '\n' -> skip, popMode
|
||||
;
|
||||
|
||||
COMPOSITE_ENTITY
|
||||
: '[' (~[\r\n{}[\]()])* ']'
|
||||
;
|
||||
|
||||
REGEX_ENTITY
|
||||
: '/' (~[\r\n])*
|
||||
;
|
||||
|
||||
ENTITY_TEXT
|
||||
: ~[ \t\r\n:]+
|
||||
;
|
||||
|
||||
COLON_MARK
|
||||
: ':'
|
||||
;
|
||||
|
||||
mode QNA_MODE;
|
||||
|
||||
NEWLINE_IN_QNA
|
||||
: '\r'? '\n' -> skip, popMode
|
||||
;
|
||||
|
||||
QNA_TEXT
|
||||
: ~[\t\r\n]+
|
||||
;
|
|
@ -0,0 +1,241 @@
|
|||
parser grammar LUFileParser;
|
||||
|
||||
options { tokenVocab=LUFileLexer; }
|
||||
|
||||
file
|
||||
: paragraph+? EOF
|
||||
;
|
||||
|
||||
paragraph
|
||||
: newline
|
||||
| nestedIntentSection
|
||||
| simpleIntentSection
|
||||
| entitySection
|
||||
| newEntitySection
|
||||
| importSection
|
||||
| qnaSection
|
||||
| modelInfoSection
|
||||
;
|
||||
|
||||
// Treat EOF as newline to handle file end gracefully
|
||||
// It's possible that the parser doesn't even have to handle NEWLINE,
|
||||
// but before the syntax is finalized, we still keep the NEWLINE in the grammar
|
||||
newline
|
||||
: WS* (NEWLINE | EOF)
|
||||
;
|
||||
|
||||
errorString
|
||||
: (WS|INVALID_TOKEN_DEFAULT_MODE)+
|
||||
;
|
||||
|
||||
nestedIntentSection
|
||||
: nestedIntentNameLine nestedIntentBodyDefinition
|
||||
;
|
||||
|
||||
nestedIntentNameLine
|
||||
: WS* HASH WS* nestedIntentName
|
||||
;
|
||||
|
||||
nestedIntentName
|
||||
: nameIdentifier (WS|nameIdentifier)*
|
||||
;
|
||||
|
||||
nameIdentifier
|
||||
: IDENTIFIER (DOT IDENTIFIER)*
|
||||
;
|
||||
|
||||
nestedIntentBodyDefinition
|
||||
: subIntentDefinition+
|
||||
;
|
||||
|
||||
subIntentDefinition
|
||||
: WS* HASH simpleIntentSection
|
||||
;
|
||||
|
||||
simpleIntentSection
|
||||
: intentDefinition
|
||||
;
|
||||
|
||||
intentDefinition
|
||||
: intentNameLine intentBody?
|
||||
;
|
||||
|
||||
intentNameLine
|
||||
: WS* HASH HASH? WS* intentName
|
||||
;
|
||||
|
||||
intentName
|
||||
: nameIdentifier (WS|nameIdentifier)*
|
||||
;
|
||||
|
||||
intentBody
|
||||
: WS* normalIntentBody
|
||||
;
|
||||
|
||||
normalIntentBody
|
||||
: WS* ((normalIntentString newline) | errorString)+
|
||||
;
|
||||
|
||||
normalIntentString
|
||||
: WS* DASH (WS|TEXT|EXPRESSION|ESCAPE_CHARACTER)*
|
||||
;
|
||||
|
||||
newEntitySection
|
||||
: newEntityDefinition
|
||||
;
|
||||
|
||||
newEntityDefinition
|
||||
: newEntityLine newEntityListbody?
|
||||
;
|
||||
|
||||
newEntityListbody
|
||||
: ((normalItemString newline) | errorString)+
|
||||
;
|
||||
|
||||
newEntityLine
|
||||
: WS* AT WS* newEntityType? WS* (newEntityName|newEntityNameWithWS)? WS* newEntityRoles? WS* newEntityUsesFeatures? WS* EQUAL? WS* (newCompositeDefinition|newRegexDefinition)? newline
|
||||
;
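// illustrative @-entity lines accepted by this rule (the names are made up):
//   @ ml address hasRoles fromAddress, toAddress
//   @ regex zipcode = /[0-9]{5}/
//   @ prebuilt number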
|
||||
|
||||
newCompositeDefinition
|
||||
: NEW_COMPOSITE_ENTITY
|
||||
;
|
||||
|
||||
newRegexDefinition
|
||||
: NEW_REGEX_ENTITY
|
||||
;
|
||||
|
||||
newEntityType
|
||||
: NEW_ENTITY_TYPE_IDENTIFIER
|
||||
;
|
||||
|
||||
newEntityRoles
|
||||
: HAS_ROLES_LABEL? WS* newEntityRoleOrFeatures
|
||||
;
|
||||
|
||||
newEntityUsesFeatures
|
||||
: HAS_FEATURES_LABEL WS* newEntityRoleOrFeatures
|
||||
;
|
||||
|
||||
newEntityRoleOrFeatures
|
||||
: (NEW_ENTITY_IDENTIFIER|NEW_ENTITY_IDENTIFIER_WITH_WS) (WS* COMMA WS* (NEW_ENTITY_IDENTIFIER|NEW_ENTITY_IDENTIFIER_WITH_WS))*
|
||||
;
|
||||
|
||||
newEntityName
|
||||
: NEW_ENTITY_IDENTIFIER (WS* PHRASE_LIST_LABEL)?
|
||||
;
|
||||
|
||||
newEntityNameWithWS
|
||||
: NEW_ENTITY_IDENTIFIER_WITH_WS (WS* PHRASE_LIST_LABEL)?
|
||||
;
|
||||
|
||||
entitySection
|
||||
: entityDefinition
|
||||
;
|
||||
|
||||
entityDefinition
|
||||
: entityLine entityListBody?
|
||||
;
|
||||
|
||||
entityLine
|
||||
: WS* DOLLAR entityName? COLON_MARK? entityType?
|
||||
;
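// illustrative $-entity lines in the older LU syntax (the values are made up):
//   $ userName : simple
//   $ PREBUILT : datetimeV2
//   $ color : red =        (synonyms then follow as '-' items in entityListBody)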
|
||||
|
||||
entityName
|
||||
: (ENTITY_TEXT|WS)+
|
||||
;
|
||||
|
||||
entityType
|
||||
: (compositeEntityIdentifier|regexEntityIdentifier|ENTITY_TEXT|COLON_MARK|WS)+
|
||||
;
|
||||
|
||||
compositeEntityIdentifier
|
||||
: COMPOSITE_ENTITY
|
||||
;
|
||||
|
||||
regexEntityIdentifier
|
||||
: REGEX_ENTITY
|
||||
;
|
||||
|
||||
entityListBody
|
||||
: ((normalItemString newline) | errorString)+
|
||||
;
|
||||
|
||||
normalItemString
|
||||
: WS* DASH (WS|TEXT|EXPRESSION|ESCAPE_CHARACTER)*
|
||||
;
|
||||
|
||||
importSection
|
||||
: importDefinition
|
||||
;
|
||||
|
||||
importDefinition
|
||||
: WS* IMPORT WS*
|
||||
;
|
||||
|
||||
qnaSection
|
||||
: qnaDefinition
|
||||
;
|
||||
|
||||
qnaDefinition
|
||||
: qnaSourceInfo? qnaIdMark? qnaQuestion moreQuestionsBody qnaAnswerBody promptSection?
|
||||
;
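// an illustrative QnA definition accepted by this rule (the content is made up):
//   # ? What is your name
//   - What should I call you
//   ```
//   My name is LU bot.
//   ```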
|
||||
|
||||
qnaSourceInfo
|
||||
: WS* QNA_SOURCE_INFO
|
||||
;
|
||||
|
||||
qnaIdMark
|
||||
: WS* QNA_ID_MARK
|
||||
;
|
||||
|
||||
qnaQuestion
|
||||
: WS* QNA questionText
|
||||
;
|
||||
|
||||
questionText
|
||||
: QNA_TEXT*
|
||||
;
|
||||
|
||||
moreQuestionsBody
|
||||
: WS* ((moreQuestion newline) | errorQuestionString)*
|
||||
;
|
||||
|
||||
moreQuestion
|
||||
: DASH (WS|TEXT)*
|
||||
;
|
||||
|
||||
errorQuestionString
|
||||
: (WS|INVALID_TOKEN_DEFAULT_MODE)+
|
||||
;
|
||||
|
||||
qnaAnswerBody
|
||||
: ((filterSection? multiLineAnswer)|(multiLineAnswer filterSection?))
|
||||
;
|
||||
|
||||
filterSection
|
||||
: WS* FILTER_MARK (filterLine | errorFilterLine)+
|
||||
;
|
||||
|
||||
promptSection
|
||||
: WS* PROMPT_MARK (filterLine | errorFilterLine)+
|
||||
;
|
||||
|
||||
filterLine
|
||||
: WS* DASH (WS|TEXT)* newline
|
||||
;
|
||||
|
||||
errorFilterLine
|
||||
: (WS|INVALID_TOKEN_DEFAULT_MODE)+
|
||||
;
|
||||
|
||||
multiLineAnswer
|
||||
: WS* MULTI_LINE_TEXT
|
||||
;
|
||||
|
||||
modelInfoSection
|
||||
: modelInfoDefinition
|
||||
;
|
||||
|
||||
modelInfoDefinition
|
||||
: WS* MODEL_INFO
|
||||
;
|
|
@ -0,0 +1,19 @@
|
|||
class BaseSection {
|
||||
constructor(parameters) {
|
||||
this.Errors = [];
|
||||
this.SectionType = '';
|
||||
this.Id = '';
|
||||
this.Body = '';
|
||||
this.Range = undefined;
|
||||
|
||||
if (parameters) {
|
||||
this.Errors = parameters.Errors || [];
|
||||
this.SectionType = parameters.SectionType || '';
|
||||
this.Id = parameters.Id || '';
|
||||
this.Body = parameters.Body || '';
|
||||
this.Range = parameters.Range;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = BaseSection;
|
|
@ -0,0 +1,19 @@
|
|||
/**
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
class FileToParse {
|
||||
/**
|
||||
* @property {string} filePath
|
||||
*/
|
||||
/**
|
||||
* @property {Boolean} includeInCollate
|
||||
*/
|
||||
constructor(filePath, includeInCollate) {
|
||||
this.filePath = filePath?filePath:'';
|
||||
if(includeInCollate === undefined) this.includeInCollate = true;
|
||||
else this.includeInCollate = includeInCollate;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FileToParse;
|
|
@ -0,0 +1,156 @@
|
|||
/**
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
const readerObj = {
|
||||
readerObject : class {
|
||||
constructor(sourceFile, model) {
|
||||
this.sourceFile = sourceFile?sourceFile:'';
|
||||
this.model = model?model:'';
|
||||
}
|
||||
},
|
||||
entity : class {
|
||||
constructor(entity, value, start, end) {
|
||||
this.entity = entity?entity:'';
|
||||
this.value = value?value:'';
|
||||
this.start = !isNaN(start)?start:-1;
|
||||
this.end = !isNaN(end)?end:-1;
|
||||
}
|
||||
},
|
||||
intent: class {
|
||||
constructor(intent, utterances) {
|
||||
this.intent = intent?intent:'';
|
||||
this.utterances = utterances?utterances:[]
|
||||
}
|
||||
},
|
||||
uttereances: class {
|
||||
constructor(text, intent, entities) {
|
||||
this.text = text?text:'';
|
||||
this.intent = intent?intent:'';
|
||||
this.entities = entities?entities:[];
|
||||
}
|
||||
},
|
||||
rLuisObj: class {
|
||||
constructor(intents, patterns) {
|
||||
this.intents = intents?intents:[];
|
||||
this.patterns = patterns?patterns:[];
|
||||
}
|
||||
},
|
||||
pattern: class {
|
||||
constructor(pattern, intent) {
|
||||
this.pattern = pattern?pattern:'';
|
||||
this.intent = intent?intent:'';
|
||||
}
|
||||
},
|
||||
modelObj: class {
|
||||
constructor(name, mode, words, activated) {
|
||||
this.name = name?name:'';
|
||||
this.words = words?words:'';
|
||||
this.mode = mode?mode:false;
|
||||
this.activated = activated?activated:false;
|
||||
}
|
||||
},
|
||||
subList: class {
|
||||
constructor(canonicalForm, list) {
|
||||
this.canonicalForm = canonicalForm?canonicalForm:'';
|
||||
this.list = list?list:[];
|
||||
}
|
||||
},
|
||||
closedLists: class {
|
||||
constructor(name, subLists, roles) {
|
||||
this.name = name?name:'';
|
||||
this.subLists = subLists?subLists:[];
|
||||
this.roles = roles?roles:[];
|
||||
}
|
||||
},
|
||||
regExEntity: class {
|
||||
constructor (name, regexPattern, roles) {
|
||||
this.name = name ? name : '';
|
||||
this.regexPattern = regexPattern ? regexPattern : '';
|
||||
this.roles = roles ? roles : [];
|
||||
}
|
||||
},
|
||||
compositeEntity: class {
|
||||
constructor (name, children, roles) {
|
||||
this.name = name ? name : '';
|
||||
this.children = children ? children : [];
|
||||
this.roles = roles ? roles : [];
|
||||
}
|
||||
},
|
||||
utteranceEntity: class {
|
||||
constructor (name, startPos, endPos, role) {
|
||||
this.entity = name ? name : '';
|
||||
this.startPos = startPos ? startPos : 0;
|
||||
this.endPos = endPos ? endPos : 0;
|
||||
}
|
||||
},
|
||||
parserEntity: class {
|
||||
constructor (parent, startPos, entity, value, endPos, type, role) {
|
||||
this.entity = entity ? entity : '';
|
||||
this.value = value ? value : [];
|
||||
this.startPos = startPos ? startPos : 0;
|
||||
this.endPos = endPos ? endPos : 0;
|
||||
this.type = type ? type : 'simple';
|
||||
this.role = role ? role : '';
|
||||
this.parent = parent ? parent : undefined;
|
||||
}
|
||||
},
|
||||
patternAnyEntity: class{
|
||||
constructor(name, explicitList, roles) {
|
||||
this.name = name ? name : '';
|
||||
this.explicitList = explicitList ? explicitList : [];
|
||||
this.roles = roles ? roles : [];
|
||||
}
|
||||
},
|
||||
entityAndRoles: class {
|
||||
constructor(name, type, roles) {
|
||||
this.name = name ? name : '';
|
||||
this.type = type ? type : '';
|
||||
this.roles = roles ? roles : [];
|
||||
}
|
||||
addRoles(roles) {
|
||||
(roles || []).forEach(role => {
|
||||
if (!this.roles.includes(role)) this.roles.push(role)
|
||||
})
|
||||
}
|
||||
hasRole(value) {
|
||||
return this.roles.includes(value);
|
||||
}
|
||||
},
|
||||
featureToModel: class {
|
||||
constructor(name, featureType) {
|
||||
this.featureName = name ? name : '';
|
||||
this.featureType = featureType ? featureType : '';
|
||||
}
|
||||
},
|
||||
modelToFeature: class {
|
||||
constructor(name, featureType) {
|
||||
this.modelName = name ? name : '';
|
||||
this.modelType = featureType ? featureType : '';
|
||||
}
|
||||
},
|
||||
childEntity: class {
|
||||
constructor(name, instanceOf, context, children, features) {
|
||||
this.name = name ? name : '';
|
||||
this.instanceOf = instanceOf ? instanceOf : null;
|
||||
this.children = children ? children : [];
|
||||
this.features = features ? features : '';
|
||||
this.context = context ? context : '';
|
||||
}
|
||||
},
|
||||
entityFeature: class {
|
||||
constructor(name, isRequired) {
|
||||
this.modelName = name ? name : '';
|
||||
this.isRequired = isRequired ? isRequired : false;
|
||||
}
|
||||
},
|
||||
plFeature: class {
|
||||
constructor(name, isRequired) {
|
||||
this.featureName = name ? name : '';
|
||||
this.isRequired = isRequired ? isRequired : false;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = readerObj;
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/**
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License.
|
||||
*/
|
||||
const QnA = require('./../../qna/qnamaker/kb');
|
||||
const LUIS = require('./../../luis/luis');
|
||||
const qnaAlterations = require('./../../qna/alterations/alterations');
|
||||
class parserObject {
|
||||
/**
|
||||
* @property {FileToParse []} additionalFilesToParse
|
||||
*/
|
||||
/**
|
||||
* @property {LUIS} LUISJsonStructure
|
||||
*/
|
||||
/**
|
||||
* @property {QnA} qnaJsonStructure
|
||||
*/
|
||||
/**
|
||||
* @property {qnaAlterations} qnaAlterations
|
||||
*/
|
||||
/**
|
||||
* @property {string} srcFile
|
||||
*/
|
||||
/**
|
||||
* @property {Boolean} includeInCollate
|
||||
*/
|
||||
constructor() {
|
||||
this.additionalFilesToParse = [];
|
||||
this.LUISJsonStructure = new LUIS();
|
||||
this.qnaJsonStructure = new QnA();
|
||||
this.qnaAlterations = new qnaAlterations();
|
||||
this.srcFile = undefined;
|
||||
this.includeInCollate = true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper method to create a parser object based on arbitrary attributes passed in.
|
||||
* @param {Object} LUISJsonStructure
|
||||
* @param {Object} qnaJsonStructure
|
||||
* @param {Object} lQnaAlterations
|
||||
* @param {Object} srcFile
|
||||
* @param {Object} includeInCollate
|
||||
*/
|
||||
parserObject.create = function(LUISJsonStructure, qnaJsonStructure, lQnaAlterations, srcFile, includeInCollate) {
|
||||
let parserObj = new parserObject();
|
||||
parserObj.LUISJsonStructure = (LUISJsonStructure || new LUIS());
|
||||
parserObj.qnaJsonStructure = (qnaJsonStructure || new QnA());
|
||||
parserObj.qnaAlterations = (lQnaAlterations || new qnaAlterations());
|
||||
parserObj.srcFile = (srcFile || undefined);
|
||||
if(includeInCollate === undefined) parserObj.includeInCollate = true;
|
||||
else parserObj.includeInCollate = includeInCollate;
|
||||
return parserObj;
|
||||
}
|
||||
|
||||
module.exports = parserObject;
|
|
@ -0,0 +1,104 @@
|
|||
/**
|
||||
* Diagnostic class
|
||||
*/
|
||||
class Diagnostic {
|
||||
constructor(range, message, severity = DiagnosticSeverity.ERROR) {
|
||||
this.Message = message;
|
||||
this.Range = range;
|
||||
this.Severity = severity;
|
||||
}
|
||||
|
||||
toString() {
|
||||
if (this.Range === undefined) {
|
||||
return `[${DiagnosticSeverity[this.Severity]}] ${this.Message.toString()}`;
|
||||
}
|
||||
else {
|
||||
return `[${DiagnosticSeverity[this.Severity]}] ${this.Range.toString()}: ${this.Message.toString()}`;
|
||||
}
|
||||
}
|
||||
}
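// e.g. (the constructed values are illustrative):
//   new Diagnostic(new Range(new Position(2, 0), new Position(2, 14)), 'bad entity').toString()
//   === '[ERROR] line 2:0 - line 2:14: bad entity'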
|
||||
|
||||
/**
|
||||
* Range class
|
||||
*/
|
||||
class Range {
|
||||
constructor(start, end) {
|
||||
this.Start = start;
|
||||
this.End = end;
|
||||
}
|
||||
|
||||
toString() {
|
||||
let result = this.Start.toString();
|
||||
if (this.Start.Line <= this.End.Line && this.Start.Character < this.End.Character) {
|
||||
result += ' - ';
|
||||
result += this.End.toString();
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Position class
|
||||
*/
|
||||
class Position {
|
||||
constructor(line, character) {
|
||||
this.Line = line;
|
||||
this.Character = character;
|
||||
}
|
||||
|
||||
toString() { return `line ${this.Line}:${this.Character}` };
|
||||
}
|
||||
|
||||
/**
|
||||
* DiagnosticSeverity enum
|
||||
*/
|
||||
const DiagnosticSeverity = {
|
||||
ERROR: 'ERROR',
|
||||
WARN: 'WARN'
|
||||
}
|
||||
|
||||
const AntlrTokens = {
|
||||
'<EOF>': 'end of file',
|
||||
'MODEL_INFO': 'model description',
|
||||
'NEWLINE': 'new line',
|
||||
'QNA': 'QnA definition',
|
||||
'HASH': 'Intent definition',
|
||||
'DOLLAR': 'Entity definition',
|
||||
'IMPORT_DESC': 'Import statement',
|
||||
'WS': 'Whitespace',
|
||||
'FILTER_MARK': 'QnA filtering section',
|
||||
"'**Filters:**'": 'QnA filtering section',
|
||||
'MULTI_LINE_TEXT': 'multiple line text'
|
||||
}
|
||||
|
||||
const BuildDiagnostic = function(parameter) {
|
||||
let message = parameter.message;
|
||||
const severity = parameter.severity === undefined ? DiagnosticSeverity.ERROR : parameter.severity;
|
||||
|
||||
let range;
|
||||
const rangeInput = parameter.range;
|
||||
const context = parameter.context;
|
||||
if (rangeInput !== undefined) {
|
||||
const startPosition = new Position(rangeInput.Start.Line, rangeInput.Start.Character);
|
||||
const stopPosition = new Position(rangeInput.End.Line, rangeInput.End.Character);
|
||||
range = new Range(startPosition, stopPosition);
|
||||
} else if (context !== undefined) {
|
||||
const startPosition = new Position(context.start.line, context.start.column);
|
||||
const stopPosition = new Position(context.stop.line, context.stop.column + context.stop.text.length);
|
||||
range = new Range(startPosition, stopPosition);
|
||||
} else if (parameter.line !== undefined) {
|
||||
range = new Range(new Position(parameter.line, 0), new Position(parameter.line, 1));
|
||||
}
|
||||
|
||||
return new Diagnostic(range, message, severity);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Diagnostic: Diagnostic,
|
||||
Range: Range,
|
||||
Position: Position,
|
||||
DiagnosticSeverity: DiagnosticSeverity,
|
||||
AntlrTokens: AntlrTokens,
|
||||
BuildDiagnostic: BuildDiagnostic
|
||||
}
|
|
@ -0,0 +1,92 @@
|
|||
const EntitySectionContext = require('./generated/LUFileParser').LUFileParser.EntitySectionContext;
|
||||
const DiagnosticSeverity = require('./diagnostic').DiagnosticSeverity;
|
||||
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
|
||||
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
|
||||
const InvalidCharsInIntentOrEntityName = require('./../utils/enums/invalidchars').InvalidCharsInIntentOrEntityName;
|
||||
const BaseSection = require('./baseSection');
|
||||
const Range = require('./diagnostic').Range;
|
||||
const Position = require('./diagnostic').Position;
|
||||
|
||||
class EntitySection extends BaseSection {
|
||||
/**
|
||||
*
|
||||
* @param {EntitySectionContext} parseTree
|
||||
*/
|
||||
constructor(parseTree) {
|
||||
super();
|
||||
this.SectionType = LUSectionTypes.ENTITYSECTION;
|
||||
this.Name = this.ExtractName(parseTree);
|
||||
this.Type = this.ExtractType(parseTree);
|
||||
this.SynonymsOrPhraseList = this.ExtractSynonymsOrPhraseList(parseTree);
|
||||
this.Id = `${this.SectionType}_${this.Name}`;
|
||||
const startPosition = new Position(parseTree.start.line, parseTree.start.column);
|
||||
const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
|
||||
this.Range = new Range(startPosition, stopPosition);
|
||||
}
|
||||
|
||||
ExtractName(parseTree) {
|
||||
let entityName;
|
||||
if (parseTree.entityDefinition().entityLine().entityName()) {
|
||||
entityName = parseTree.entityDefinition().entityLine().entityName().getText().trim();
|
||||
} else {
|
||||
this.Errors.push(BuildDiagnostic({
|
||||
message: "Invalid entity line, did you miss entity name after $",
|
||||
context: parseTree.entityDefinition().entityLine()
|
||||
}));
|
||||
}
|
||||
|
||||
if (entityName && InvalidCharsInIntentOrEntityName.some(x => entityName.includes(x))) {
|
||||
this.Errors.push(BuildDiagnostic({
|
||||
message: `Invalid entity line, entity name ${entityName} cannot contain any of the following characters: [<, >, *, %, &, :, \\, $]`,
|
||||
context: parseTree.entityDefinition().entityLine()
|
||||
}));
|
||||
} else {
|
||||
return entityName;
|
||||
}
|
||||
}
|
||||
|
||||
ExtractType(parseTree) {
|
||||
if (parseTree.entityDefinition().entityLine().entityType()) {
|
||||
return parseTree.entityDefinition().entityLine().entityType().getText().trim();
|
||||
} else {
|
||||
this.Errors.push(BuildDiagnostic({
|
||||
message: "Invalid entity line, did you miss entity type after $",
|
||||
context: parseTree.entityDefinition().entityLine()
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
ExtractSynonymsOrPhraseList(parseTree) {
|
||||
let synonymsOrPhraseList = [];
|
||||
|
||||
if (parseTree.entityDefinition().entityListBody()) {
|
||||
for (const errorItemStr of parseTree.entityDefinition().entityListBody().errorString()) {
|
||||
if (errorItemStr.getText().trim() !== '') {
|
||||
this.Errors.push(BuildDiagnostic({
|
||||
message: "Invalid list entity line, did you miss '-' at line begin",
|
||||
context: errorItemStr
|
||||
}))
}
|
||||
}
|
||||
|
||||
for (const normalItemStr of parseTree.entityDefinition().entityListBody().normalItemString()) {
|
||||
var itemStr = normalItemStr.getText().trim();
|
||||
synonymsOrPhraseList.push(itemStr.substr(1).trim());
|
||||
}
|
||||
}
|
||||
|
||||
if (this.Type && this.Type.indexOf('=') > -1 && synonymsOrPhraseList.length === 0) {
|
||||
let errorMsg = `no synonyms list found for list entity definition: "${parseTree.entityDefinition().entityLine().getText()}"`;
|
||||
let error = BuildDiagnostic({
|
||||
message: errorMsg,
|
||||
context: parseTree.entityDefinition().entityLine(),
|
||||
severity: DiagnosticSeverity.WARN
|
||||
})
|
||||
|
||||
this.Errors.push(error);
|
||||
}
|
||||
|
||||
return synonymsOrPhraseList;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = EntitySection;
|
File differences hidden because one or more lines are too long
|
@ -0,0 +1,567 @@
|
|||
// Generated from ../LUFileLexer.g4 by ANTLR 4.8
|
||||
// jshint ignore: start
|
||||
var antlr4 = require('antlr4/index');
|
||||
|
||||
|
||||
|
||||
var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964",
|
||||
"\u0002)\u02ad\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\u0004",
|
||||
"\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004\u0004\u0005\t",
|
||||
"\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004\b\t\b\u0004\t\t",
|
||||
"\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004\f\t\f\u0004\r\t\r\u0004\u000e",
|
||||
"\t\u000e\u0004\u000f\t\u000f\u0004\u0010\t\u0010\u0004\u0011\t\u0011",
|
||||
"\u0004\u0012\t\u0012\u0004\u0013\t\u0013\u0004\u0014\t\u0014\u0004\u0015",
|
||||
"\t\u0015\u0004\u0016\t\u0016\u0004\u0017\t\u0017\u0004\u0018\t\u0018",
|
||||
"\u0004\u0019\t\u0019\u0004\u001a\t\u001a\u0004\u001b\t\u001b\u0004\u001c",
|
||||
"\t\u001c\u0004\u001d\t\u001d\u0004\u001e\t\u001e\u0004\u001f\t\u001f",
|
||||
"\u0004 \t \u0004!\t!\u0004\"\t\"\u0004#\t#\u0004$\t$\u0004%\t%\u0004",
|
||||
"&\t&\u0004\'\t\'\u0004(\t(\u0004)\t)\u0004*\t*\u0004+\t+\u0004,\t,\u0004",
|
||||
"-\t-\u0004.\t.\u0004/\t/\u00040\t0\u00041\t1\u00042\t2\u00043\t3\u0004",
|
||||
"4\t4\u00045\t5\u00046\t6\u00047\t7\u00048\t8\u00049\t9\u0004:\t:\u0004",
|
||||
";\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004?\t?\u0004@\t@\u0004A\tA\u0004",
|
||||
"B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004F\tF\u0004G\tG\u0004H\tH\u0004",
|
||||
"I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004M\tM\u0003\u0002\u0003\u0002",
|
||||
"\u0003\u0003\u0003\u0003\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005",
|
||||
"\u0003\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003\b\u0003\b\u0003",
|
||||
"\t\u0003\t\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003",
|
||||
"\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010",
|
||||
"\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013",
|
||||
"\u0003\u0013\u0003\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016",
|
||||
"\u0003\u0016\u0003\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019",
|
||||
"\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c",
|
||||
"\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001f",
|
||||
"\u0003\u001f\u0003 \u0006 \u00de\n \r \u000e \u00df\u0003!\u0005!\u00e3",
|
||||
"\n!\u0003!\u0003!\u0003!\u0003!\u0003\"\u0007\"\u00ea\n\"\f\"\u000e",
|
||||
"\"\u00ed\u000b\"\u0003\"\u0003\"\u0007\"\u00f1\n\"\f\"\u000e\"\u00f4",
|
||||
"\u000b\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003",
|
||||
"\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003",
|
||||
"\"\u0003\"\u0003\"\u0003\"\u0003\"\u0007\"\u010b\n\"\f\"\u000e\"\u010e",
|
||||
"\u000b\"\u0003\"\u0003\"\u0006\"\u0112\n\"\r\"\u000e\"\u0113\u0003#",
|
||||
"\u0007#\u0117\n#\f#\u000e#\u011a\u000b#\u0003#\u0003#\u0007#\u011e\n",
|
||||
"#\f#\u000e#\u0121\u000b#\u0003#\u0003#\u0003#\u0003#\u0006#\u0127\n",
|
||||
"#\r#\u000e#\u0128\u0003$\u0007$\u012c\n$\f$\u000e$\u012f\u000b$\u0003",
|
||||
"$\u0003$\u0007$\u0133\n$\f$\u000e$\u0136\u000b$\u0003$\u0003$\u0003",
|
||||
"%\u0006%\u013b\n%\r%\u000e%\u013c\u0003%\u0007%\u0140\n%\f%\u000e%\u0143",
|
||||
"\u000b%\u0003%\u0003%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003&\u0003",
|
||||
"\'\u0003\'\u0003\'\u0003\'\u0003(\u0003(\u0003(\u0003(\u0003)\u0003",
|
||||
")\u0003)\u0003)\u0003*\u0003*\u0007*\u015b\n*\f*\u000e*\u015e\u000b",
|
||||
"*\u0003*\u0003*\u0003*\u0007*\u0163\n*\f*\u000e*\u0166\u000b*\u0003",
|
||||
"*\u0003*\u0003+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003",
|
||||
"+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003,\u0003,\u0003,\u0003,\u0007",
|
||||
",\u017c\n,\f,\u000e,\u017f\u000b,\u0003,\u0003,\u0003,\u0003,\u0003",
|
||||
",\u0003-\u0003-\u0003-\u0003-\u0003-\u0007-\u018b\n-\f-\u000e-\u018e",
|
||||
"\u000b-\u0003-\u0003-\u0003-\u0003-\u0003.\u0003.\u0003.\u0003.\u0003",
|
||||
".\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003",
|
||||
"/\u0003/\u00030\u00030\u00030\u00030\u00031\u00051\u01a9\n1\u00031\u0003",
|
||||
"1\u00031\u00031\u00031\u00032\u00032\u00033\u00033\u00034\u00034\u0003",
|
||||
"4\u00034\u00034\u00034\u00034\u00034\u00054\u01bc\n4\u00035\u00035\u0003",
|
||||
"5\u00035\u00035\u00035\u00035\u00035\u00035\u00035\u00035\u00035\u0005",
|
||||
"5\u01ca\n5\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u00036\u0003",
|
||||
"6\u00056\u0211\n6\u00037\u00037\u00077\u0215\n7\f7\u000e7\u0218\u000b",
|
||||
"7\u00037\u00037\u00038\u00038\u00078\u021e\n8\f8\u000e8\u0221\u000b",
|
||||
"8\u00038\u00038\u00039\u00039\u00079\u0227\n9\f9\u000e9\u022a\u000b",
|
||||
"9\u0003:\u0006:\u022d\n:\r:\u000e:\u022e\u0003;\u0003;\u0006;\u0233",
|
||||
"\n;\r;\u000e;\u0234\u0003;\u0003;\u0003<\u0003<\u0003<\u0003<\u0003",
|
||||
"=\u0003=\u0003=\u0003=\u0003>\u0005>\u0242\n>\u0003>\u0003>\u0003>\u0003",
|
||||
">\u0003>\u0003?\u0003?\u0003?\u0005?\u024c\n?\u0003?\u0003?\u0003?\u0007",
|
||||
"?\u0251\n?\f?\u000e?\u0254\u000b?\u0003@\u0003@\u0003A\u0003A\u0003",
|
||||
"A\u0003A\u0003B\u0005B\u025d\nB\u0003B\u0003B\u0003B\u0003B\u0003B\u0003",
|
||||
"C\u0003C\u0005C\u0266\nC\u0003D\u0003D\u0003D\u0003D\u0007D\u026c\n",
|
||||
"D\fD\u000eD\u026f\u000bD\u0003D\u0007D\u0272\nD\fD\u000eD\u0275\u000b",
|
||||
"D\u0003D\u0003D\u0003E\u0006E\u027a\nE\rE\u000eE\u027b\u0003F\u0003",
|
||||
"F\u0003F\u0003F\u0003G\u0005G\u0283\nG\u0003G\u0003G\u0003G\u0003G\u0003",
|
||||
"G\u0003H\u0003H\u0007H\u028c\nH\fH\u000eH\u028f\u000bH\u0003H\u0003",
|
||||
"H\u0003I\u0003I\u0007I\u0295\nI\fI\u000eI\u0298\u000bI\u0003J\u0006",
|
||||
"J\u029b\nJ\rJ\u000eJ\u029c\u0003K\u0003K\u0003L\u0005L\u02a2\nL\u0003",
|
||||
"L\u0003L\u0003L\u0003L\u0003L\u0003M\u0006M\u02aa\nM\rM\u000eM\u02ab",
|
||||
"\u0007\u015c\u0164\u017d\u018c\u027b\u0002N\b\u0002\n\u0002\f\u0002",
|
||||
"\u000e\u0002\u0010\u0002\u0012\u0002\u0014\u0002\u0016\u0002\u0018\u0002",
|
||||
"\u001a\u0002\u001c\u0002\u001e\u0002 \u0002\"\u0002$\u0002&\u0002(\u0002",
|
||||
"*\u0002,\u0002.\u00020\u00022\u00024\u00026\u00028\u0002:\u0002<\u0002",
|
||||
">\u0002@\u0002B\u0002D\u0003F\u0004H\u0005J\u0006L\u0007N\bP\tR\nT\u000b",
|
||||
"V\fX\rZ\u000e\\\u000f^\u0010`\u0011b\u0012d\u0002f\u0002h\u0013j\u0014",
|
||||
"l\u0015n\u0016p\u0017r\u0018t\u0019v\u001ax\u001bz\u001c|\u0002~\u0002",
|
||||
"\u0080\u001d\u0082\u001e\u0084\u001f\u0086\u0002\u0088\u0002\u008a ",
|
||||
"\u008c!\u008e\"\u0090\u0002\u0092#\u0094$\u0096%\u0098&\u009a\'\u009c",
|
||||
"(\u009e)\b\u0002\u0003\u0004\u0005\u0006\u0007+\u0004\u0002CCcc\u0004",
|
||||
"\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGgg\u0004\u0002",
|
||||
"HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004\u0002LLl",
|
||||
"l\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002PPpp\u0004",
|
||||
"\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTtt\u0004\u0002",
|
||||
"UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004\u0002YYy",
|
||||
"y\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0004\u0002C\\c",
|
||||
"|\u0006\u0002\u000b\u000b\"\"\u00a2\u00a2\uff01\uff01\u0004\u0002,-",
|
||||
"//\u0004\u0002\f\f\u000f\u000f\u0006\u0002\f\f\u000f\u000f]]__\u0005",
|
||||
"\u0002\f\f\u000f\u000f*+\t\u0002\f\f\u000f\u000f*+]]__}}\u007f\u007f",
|
||||
"\t\u0002\u000b\f\u000f\u000f\"\"$$))..==\u0004\u0002$$))\b\u0002\u000b",
|
||||
"\f\u000f\u000f$$))..==\u0004\u0002//aa\u0006\u0002\f\f\u000f\u000f}",
|
||||
"}\u007f\u007f\u0006\u0002\u000b\f\u000f\u000f\"\"^^\u0006\u0002\u000b",
|
||||
"\f\u000f\u000f\"\"<<\u0004\u0002\u000b\f\u000f\u000f\u0002\u02bc\u0002",
|
||||
"D\u0003\u0002\u0002\u0002\u0002F\u0003\u0002\u0002\u0002\u0002H\u0003",
|
||||
"\u0002\u0002\u0002\u0002J\u0003\u0002\u0002\u0002\u0002L\u0003\u0002",
|
||||
"\u0002\u0002\u0002N\u0003\u0002\u0002\u0002\u0002P\u0003\u0002\u0002",
|
||||
"\u0002\u0002R\u0003\u0002\u0002\u0002\u0002T\u0003\u0002\u0002\u0002",
|
||||
"\u0002V\u0003\u0002\u0002\u0002\u0002X\u0003\u0002\u0002\u0002\u0002",
|
||||
"Z\u0003\u0002\u0002\u0002\u0002\\\u0003\u0002\u0002\u0002\u0002^\u0003",
|
||||
"\u0002\u0002\u0002\u0002`\u0003\u0002\u0002\u0002\u0002b\u0003\u0002",
|
||||
"\u0002\u0002\u0003d\u0003\u0002\u0002\u0002\u0003f\u0003\u0002\u0002",
|
||||
"\u0002\u0003h\u0003\u0002\u0002\u0002\u0003j\u0003\u0002\u0002\u0002",
|
||||
"\u0003l\u0003\u0002\u0002\u0002\u0003n\u0003\u0002\u0002\u0002\u0003",
|
||||
"p\u0003\u0002\u0002\u0002\u0003r\u0003\u0002\u0002\u0002\u0003t\u0003",
|
||||
"\u0002\u0002\u0002\u0003v\u0003\u0002\u0002\u0002\u0003x\u0003\u0002",
|
||||
"\u0002\u0002\u0003z\u0003\u0002\u0002\u0002\u0004|\u0003\u0002\u0002",
|
||||
"\u0002\u0004~\u0003\u0002\u0002\u0002\u0004\u0080\u0003\u0002\u0002",
|
||||
"\u0002\u0004\u0082\u0003\u0002\u0002\u0002\u0004\u0084\u0003\u0002\u0002",
|
||||
"\u0002\u0005\u0086\u0003\u0002\u0002\u0002\u0005\u0088\u0003\u0002\u0002",
|
||||
"\u0002\u0005\u008a\u0003\u0002\u0002\u0002\u0005\u008c\u0003\u0002\u0002",
|
||||
"\u0002\u0005\u008e\u0003\u0002\u0002\u0002\u0006\u0090\u0003\u0002\u0002",
|
||||
"\u0002\u0006\u0092\u0003\u0002\u0002\u0002\u0006\u0094\u0003\u0002\u0002",
|
||||
"\u0002\u0006\u0096\u0003\u0002\u0002\u0002\u0006\u0098\u0003\u0002\u0002",
|
||||
"\u0002\u0006\u009a\u0003\u0002\u0002\u0002\u0007\u009c\u0003\u0002\u0002",
|
||||
"\u0002\u0007\u009e\u0003\u0002\u0002\u0002\b\u00a0\u0003\u0002\u0002",
|
||||
"\u0002\n\u00a2\u0003\u0002\u0002\u0002\f\u00a4\u0003\u0002\u0002\u0002",
|
||||
"\u000e\u00a6\u0003\u0002\u0002\u0002\u0010\u00a8\u0003\u0002\u0002\u0002",
|
||||
"\u0012\u00aa\u0003\u0002\u0002\u0002\u0014\u00ac\u0003\u0002\u0002\u0002",
|
||||
"\u0016\u00ae\u0003\u0002\u0002\u0002\u0018\u00b0\u0003\u0002\u0002\u0002",
|
||||
"\u001a\u00b2\u0003\u0002\u0002\u0002\u001c\u00b4\u0003\u0002\u0002\u0002",
|
||||
"\u001e\u00b6\u0003\u0002\u0002\u0002 \u00b8\u0003\u0002\u0002\u0002",
|
||||
"\"\u00ba\u0003\u0002\u0002\u0002$\u00bc\u0003\u0002\u0002\u0002&\u00be",
|
||||
"\u0003\u0002\u0002\u0002(\u00c0\u0003\u0002\u0002\u0002*\u00c2\u0003",
|
||||
"\u0002\u0002\u0002,\u00c4\u0003\u0002\u0002\u0002.\u00c6\u0003\u0002",
|
||||
"\u0002\u00020\u00c8\u0003\u0002\u0002\u00022\u00ca\u0003\u0002\u0002",
|
||||
"\u00024\u00cc\u0003\u0002\u0002\u00026\u00ce\u0003\u0002\u0002\u0002",
|
||||
"8\u00d0\u0003\u0002\u0002\u0002:\u00d2\u0003\u0002\u0002\u0002<\u00d4",
|
||||
"\u0003\u0002\u0002\u0002>\u00d6\u0003\u0002\u0002\u0002@\u00d8\u0003",
|
||||
"\u0002\u0002\u0002B\u00da\u0003\u0002\u0002\u0002D\u00dd\u0003\u0002",
|
||||
"\u0002\u0002F\u00e2\u0003\u0002\u0002\u0002H\u00eb\u0003\u0002\u0002",
|
||||
"\u0002J\u0118\u0003\u0002\u0002\u0002L\u012d\u0003\u0002\u0002\u0002",
|
||||
"N\u013a\u0003\u0002\u0002\u0002P\u0148\u0003\u0002\u0002\u0002R\u014c",
|
||||
"\u0003\u0002\u0002\u0002T\u0150\u0003\u0002\u0002\u0002V\u0154\u0003",
|
||||
"\u0002\u0002\u0002X\u0158\u0003\u0002\u0002\u0002Z\u0169\u0003\u0002",
|
||||
"\u0002\u0002\\\u0177\u0003\u0002\u0002\u0002^\u0185\u0003\u0002\u0002",
|
||||
"\u0002`\u0193\u0003\u0002\u0002\u0002b\u01a1\u0003\u0002\u0002\u0002",
|
||||
"d\u01a3\u0003\u0002\u0002\u0002f\u01a8\u0003\u0002\u0002\u0002h\u01af",
|
||||
"\u0003\u0002\u0002\u0002j\u01b1\u0003\u0002\u0002\u0002l\u01b3\u0003",
|
||||
"\u0002\u0002\u0002n\u01bd\u0003\u0002\u0002\u0002p\u0210\u0003\u0002",
|
||||
"\u0002\u0002r\u0212\u0003\u0002\u0002\u0002t\u021b\u0003\u0002\u0002",
|
||||
"\u0002v\u0224\u0003\u0002\u0002\u0002x\u022c\u0003\u0002\u0002\u0002",
|
||||
"z\u0230\u0003\u0002\u0002\u0002|\u0238\u0003\u0002\u0002\u0002~\u023c",
|
||||
"\u0003\u0002\u0002\u0002\u0080\u0241\u0003\u0002\u0002\u0002\u0082\u024b",
|
||||
"\u0003\u0002\u0002\u0002\u0084\u0255\u0003\u0002\u0002\u0002\u0086\u0257",
|
||||
"\u0003\u0002\u0002\u0002\u0088\u025c\u0003\u0002\u0002\u0002\u008a\u0263",
|
||||
"\u0003\u0002\u0002\u0002\u008c\u0267\u0003\u0002\u0002\u0002\u008e\u0279",
|
||||
"\u0003\u0002\u0002\u0002\u0090\u027d\u0003\u0002\u0002\u0002\u0092\u0282",
|
||||
"\u0003\u0002\u0002\u0002\u0094\u0289\u0003\u0002\u0002\u0002\u0096\u0292",
|
||||
"\u0003\u0002\u0002\u0002\u0098\u029a\u0003\u0002\u0002\u0002\u009a\u029e",
|
||||
"\u0003\u0002\u0002\u0002\u009c\u02a1\u0003\u0002\u0002\u0002\u009e\u02a9",
|
||||
"\u0003\u0002\u0002\u0002\u00a0\u00a1\t\u0002\u0002\u0002\u00a1\t\u0003",
|
||||
"\u0002\u0002\u0002\u00a2\u00a3\t\u0003\u0002\u0002\u00a3\u000b\u0003",
|
||||
"\u0002\u0002\u0002\u00a4\u00a5\t\u0004\u0002\u0002\u00a5\r\u0003\u0002",
|
||||
"\u0002\u0002\u00a6\u00a7\t\u0005\u0002\u0002\u00a7\u000f\u0003\u0002",
|
||||
"\u0002\u0002\u00a8\u00a9\t\u0006\u0002\u0002\u00a9\u0011\u0003\u0002",
|
||||
"\u0002\u0002\u00aa\u00ab\t\u0007\u0002\u0002\u00ab\u0013\u0003\u0002",
|
||||
"\u0002\u0002\u00ac\u00ad\t\b\u0002\u0002\u00ad\u0015\u0003\u0002\u0002",
|
||||
"\u0002\u00ae\u00af\t\t\u0002\u0002\u00af\u0017\u0003\u0002\u0002\u0002",
|
||||
"\u00b0\u00b1\t\n\u0002\u0002\u00b1\u0019\u0003\u0002\u0002\u0002\u00b2",
|
||||
"\u00b3\t\u000b\u0002\u0002\u00b3\u001b\u0003\u0002\u0002\u0002\u00b4",
|
||||
"\u00b5\t\f\u0002\u0002\u00b5\u001d\u0003\u0002\u0002\u0002\u00b6\u00b7",
|
||||
"\t\r\u0002\u0002\u00b7\u001f\u0003\u0002\u0002\u0002\u00b8\u00b9\t\u000e",
|
||||
"\u0002\u0002\u00b9!\u0003\u0002\u0002\u0002\u00ba\u00bb\t\u000f\u0002",
|
||||
"\u0002\u00bb#\u0003\u0002\u0002\u0002\u00bc\u00bd\t\u0010\u0002\u0002",
|
||||
"\u00bd%\u0003\u0002\u0002\u0002\u00be\u00bf\t\u0011\u0002\u0002\u00bf",
|
||||
"\'\u0003\u0002\u0002\u0002\u00c0\u00c1\t\u0012\u0002\u0002\u00c1)\u0003",
|
||||
"\u0002\u0002\u0002\u00c2\u00c3\t\u0013\u0002\u0002\u00c3+\u0003\u0002",
|
||||
"\u0002\u0002\u00c4\u00c5\t\u0014\u0002\u0002\u00c5-\u0003\u0002\u0002",
|
||||
"\u0002\u00c6\u00c7\t\u0015\u0002\u0002\u00c7/\u0003\u0002\u0002\u0002",
|
||||
"\u00c8\u00c9\t\u0016\u0002\u0002\u00c91\u0003\u0002\u0002\u0002\u00ca",
|
||||
"\u00cb\t\u0017\u0002\u0002\u00cb3\u0003\u0002\u0002\u0002\u00cc\u00cd",
|
||||
"\t\u0018\u0002\u0002\u00cd5\u0003\u0002\u0002\u0002\u00ce\u00cf\t\u0019",
|
||||
"\u0002\u0002\u00cf7\u0003\u0002\u0002\u0002\u00d0\u00d1\t\u001a\u0002",
|
||||
"\u0002\u00d19\u0003\u0002\u0002\u0002\u00d2\u00d3\t\u001b\u0002\u0002",
|
||||
"\u00d3;\u0003\u0002\u0002\u0002\u00d4\u00d5\t\u001c\u0002\u0002\u00d5",
|
||||
"=\u0003\u0002\u0002\u0002\u00d6\u00d7\u00042;\u0002\u00d7?\u0003\u0002",
|
||||
"\u0002\u0002\u00d8\u00d9\t\u001d\u0002\u0002\u00d9A\u0003\u0002\u0002",
|
||||
"\u0002\u00da\u00db\t\u001e\u0002\u0002\u00dbC\u0003\u0002\u0002\u0002",
|
||||
"\u00dc\u00de\u0005@\u001e\u0002\u00dd\u00dc\u0003\u0002\u0002\u0002",
|
||||
"\u00de\u00df\u0003\u0002\u0002\u0002\u00df\u00dd\u0003\u0002\u0002\u0002",
|
||||
"\u00df\u00e0\u0003\u0002\u0002\u0002\u00e0E\u0003\u0002\u0002\u0002",
|
||||
"\u00e1\u00e3\u0007\u000f\u0002\u0002\u00e2\u00e1\u0003\u0002\u0002\u0002",
|
||||
"\u00e2\u00e3\u0003\u0002\u0002\u0002\u00e3\u00e4\u0003\u0002\u0002\u0002",
|
||||
"\u00e4\u00e5\u0007\f\u0002\u0002\u00e5\u00e6\u0003\u0002\u0002\u0002",
|
||||
"\u00e6\u00e7\b!\u0002\u0002\u00e7G\u0003\u0002\u0002\u0002\u00e8\u00ea",
|
||||
"\u0005D \u0002\u00e9\u00e8\u0003\u0002\u0002\u0002\u00ea\u00ed\u0003",
|
||||
"\u0002\u0002\u0002\u00eb\u00e9\u0003\u0002\u0002\u0002\u00eb\u00ec\u0003",
|
||||
"\u0002\u0002\u0002\u00ec\u00ee\u0003\u0002\u0002\u0002\u00ed\u00eb\u0003",
|
||||
"\u0002\u0002\u0002\u00ee\u00f2\u0007@\u0002\u0002\u00ef\u00f1\u0005",
|
||||
"D \u0002\u00f0\u00ef\u0003\u0002\u0002\u0002\u00f1\u00f4\u0003\u0002",
|
||||
"\u0002\u0002\u00f2\u00f0\u0003\u0002\u0002\u0002\u00f2\u00f3\u0003\u0002",
|
||||
"\u0002\u0002\u00f3\u00f5\u0003\u0002\u0002\u0002\u00f4\u00f2\u0003\u0002",
|
||||
"\u0002\u0002\u00f5\u00f6\u0007#\u0002\u0002\u00f6\u00f7\u0007%\u0002",
|
||||
"\u0002\u00f7\u00f8\u0007\"\u0002\u0002\u00f8\u00f9\u0007B\u0002\u0002",
|
||||
"\u00f9\u00fa\u0007s\u0002\u0002\u00fa\u00fb\u0007p\u0002\u0002\u00fb",
|
||||
"\u00fc\u0007c\u0002\u0002\u00fc\u00fd\u00070\u0002\u0002\u00fd\u00fe",
|
||||
"\u0007r\u0002\u0002\u00fe\u00ff\u0007c\u0002\u0002\u00ff\u0100\u0007",
|
||||
"k\u0002\u0002\u0100\u0101\u0007t\u0002\u0002\u0101\u0102\u00070\u0002",
|
||||
"\u0002\u0102\u0103\u0007u\u0002\u0002\u0103\u0104\u0007q\u0002\u0002",
|
||||
"\u0104\u0105\u0007w\u0002\u0002\u0105\u0106\u0007t\u0002\u0002\u0106",
|
||||
"\u0107\u0007e\u0002\u0002\u0107\u0108\u0007g\u0002\u0002\u0108\u010c",
|
||||
"\u0003\u0002\u0002\u0002\u0109\u010b\u0005D \u0002\u010a\u0109\u0003",
|
||||
"\u0002\u0002\u0002\u010b\u010e\u0003\u0002\u0002\u0002\u010c\u010a\u0003",
|
||||
"\u0002\u0002\u0002\u010c\u010d\u0003\u0002\u0002\u0002\u010d\u010f\u0003",
|
||||
"\u0002\u0002\u0002\u010e\u010c\u0003\u0002\u0002\u0002\u010f\u0111\u0007",
|
||||
"?\u0002\u0002\u0110\u0112\n\u001f\u0002\u0002\u0111\u0110\u0003\u0002",
|
||||
"\u0002\u0002\u0112\u0113\u0003\u0002\u0002\u0002\u0113\u0111\u0003\u0002",
|
||||
"\u0002\u0002\u0113\u0114\u0003\u0002\u0002\u0002\u0114I\u0003\u0002",
|
||||
"\u0002\u0002\u0115\u0117\u0005D \u0002\u0116\u0115\u0003\u0002\u0002",
|
||||
"\u0002\u0117\u011a\u0003\u0002\u0002\u0002\u0118\u0116\u0003\u0002\u0002",
|
||||
"\u0002\u0118\u0119\u0003\u0002\u0002\u0002\u0119\u011b\u0003\u0002\u0002",
|
||||
"\u0002\u011a\u0118\u0003\u0002\u0002\u0002\u011b\u011f\u0007@\u0002",
|
||||
"\u0002\u011c\u011e\u0005D \u0002\u011d\u011c\u0003\u0002\u0002\u0002",
|
||||
"\u011e\u0121\u0003\u0002\u0002\u0002\u011f\u011d\u0003\u0002\u0002\u0002",
|
||||
"\u011f\u0120\u0003\u0002\u0002\u0002\u0120\u0122\u0003\u0002\u0002\u0002",
|
||||
"\u0121\u011f\u0003\u0002\u0002\u0002\u0122\u0123\u0007#\u0002\u0002",
|
||||
"\u0123\u0124\u0007%\u0002\u0002\u0124\u0126\u0003\u0002\u0002\u0002",
|
||||
"\u0125\u0127\n\u001f\u0002\u0002\u0126\u0125\u0003\u0002\u0002\u0002",
|
||||
"\u0127\u0128\u0003\u0002\u0002\u0002\u0128\u0126\u0003\u0002\u0002\u0002",
|
||||
"\u0128\u0129\u0003\u0002\u0002\u0002\u0129K\u0003\u0002\u0002\u0002",
|
||||
"\u012a\u012c\u0005D \u0002\u012b\u012a\u0003\u0002\u0002\u0002\u012c",
|
||||
"\u012f\u0003\u0002\u0002\u0002\u012d\u012b\u0003\u0002\u0002\u0002\u012d",
|
||||
"\u012e\u0003\u0002\u0002\u0002\u012e\u0130\u0003\u0002\u0002\u0002\u012f",
|
||||
"\u012d\u0003\u0002\u0002\u0002\u0130\u0134\u0007@\u0002\u0002\u0131",
|
||||
"\u0133\n\u001f\u0002\u0002\u0132\u0131\u0003\u0002\u0002\u0002\u0133",
|
||||
"\u0136\u0003\u0002\u0002\u0002\u0134\u0132\u0003\u0002\u0002\u0002\u0134",
|
||||
"\u0135\u0003\u0002\u0002\u0002\u0135\u0137\u0003\u0002\u0002\u0002\u0136",
|
||||
"\u0134\u0003\u0002\u0002\u0002\u0137\u0138\b$\u0002\u0002\u0138M\u0003",
|
||||
"\u0002\u0002\u0002\u0139\u013b\u0007%\u0002\u0002\u013a\u0139\u0003",
|
||||
"\u0002\u0002\u0002\u013b\u013c\u0003\u0002\u0002\u0002\u013c\u013a\u0003",
|
||||
"\u0002\u0002\u0002\u013c\u013d\u0003\u0002\u0002\u0002\u013d\u0141\u0003",
|
||||
"\u0002\u0002\u0002\u013e\u0140\u0005D \u0002\u013f\u013e\u0003\u0002",
|
||||
"\u0002\u0002\u0140\u0143\u0003\u0002\u0002\u0002\u0141\u013f\u0003\u0002",
|
||||
"\u0002\u0002\u0141\u0142\u0003\u0002\u0002\u0002\u0142\u0144\u0003\u0002",
|
||||
"\u0002\u0002\u0143\u0141\u0003\u0002\u0002\u0002\u0144\u0145\u0007A",
|
||||
"\u0002\u0002\u0145\u0146\u0003\u0002\u0002\u0002\u0146\u0147\b%\u0003",
|
||||
"\u0002\u0147O\u0003\u0002\u0002\u0002\u0148\u0149\u0007%\u0002\u0002",
|
||||
"\u0149\u014a\u0003\u0002\u0002\u0002\u014a\u014b\b&\u0004\u0002\u014b",
|
||||
"Q\u0003\u0002\u0002\u0002\u014c\u014d\u0005B\u001f\u0002\u014d\u014e",
|
||||
"\u0003\u0002\u0002\u0002\u014e\u014f\b\'\u0005\u0002\u014fS\u0003\u0002",
|
||||
"\u0002\u0002\u0150\u0151\u0007&\u0002\u0002\u0151\u0152\u0003\u0002",
|
||||
"\u0002\u0002\u0152\u0153\b(\u0006\u0002\u0153U\u0003\u0002\u0002\u0002",
|
||||
"\u0154\u0155\u0007B\u0002\u0002\u0155\u0156\u0003\u0002\u0002\u0002",
|
||||
"\u0156\u0157\b)\u0007\u0002\u0157W\u0003\u0002\u0002\u0002\u0158\u015c",
|
||||
"\u0007]\u0002\u0002\u0159\u015b\n \u0002\u0002\u015a\u0159\u0003\u0002",
|
||||
"\u0002\u0002\u015b\u015e\u0003\u0002\u0002\u0002\u015c\u015d\u0003\u0002",
|
||||
"\u0002\u0002\u015c\u015a\u0003\u0002\u0002\u0002\u015d\u015f\u0003\u0002",
|
||||
"\u0002\u0002\u015e\u015c\u0003\u0002\u0002\u0002\u015f\u0160\u0007_",
|
||||
"\u0002\u0002\u0160\u0164\u0007*\u0002\u0002\u0161\u0163\n!\u0002\u0002",
|
||||
"\u0162\u0161\u0003\u0002\u0002\u0002\u0163\u0166\u0003\u0002\u0002\u0002",
|
||||
"\u0164\u0165\u0003\u0002\u0002\u0002\u0164\u0162\u0003\u0002\u0002\u0002",
|
||||
"\u0165\u0167\u0003\u0002\u0002\u0002\u0166\u0164\u0003\u0002\u0002\u0002",
|
||||
"\u0167\u0168\u0007+\u0002\u0002\u0168Y\u0003\u0002\u0002\u0002\u0169",
|
||||
"\u016a\u0007,\u0002\u0002\u016a\u016b\u0007,\u0002\u0002\u016b\u016c",
|
||||
"\u0003\u0002\u0002\u0002\u016c\u016d\u0005\u0012\u0007\u0002\u016d\u016e",
|
||||
"\u0005\u0018\n\u0002\u016e\u016f\u0005\u001e\r\u0002\u016f\u0170\u0005",
|
||||
".\u0015\u0002\u0170\u0171\u0005\u0010\u0006\u0002\u0171\u0172\u0005",
|
||||
"*\u0013\u0002\u0172\u0173\u0005,\u0014\u0002\u0173\u0174\u0007<\u0002",
|
||||
"\u0002\u0174\u0175\u0007,\u0002\u0002\u0175\u0176\u0007,\u0002\u0002",
|
||||
"\u0176[\u0003\u0002\u0002\u0002\u0177\u0178\u0007>\u0002\u0002\u0178",
|
||||
"\u0179\u0007c\u0002\u0002\u0179\u017d\u0003\u0002\u0002\u0002\u017a",
|
||||
"\u017c\u000b\u0002\u0002\u0002\u017b\u017a\u0003\u0002\u0002\u0002\u017c",
|
||||
"\u017f\u0003\u0002\u0002\u0002\u017d\u017e\u0003\u0002\u0002\u0002\u017d",
|
||||
"\u017b\u0003\u0002\u0002\u0002\u017e\u0180\u0003\u0002\u0002\u0002\u017f",
|
||||
"\u017d\u0003\u0002\u0002\u0002\u0180\u0181\u0007>\u0002\u0002\u0181",
|
||||
"\u0182\u00071\u0002\u0002\u0182\u0183\u0007c\u0002\u0002\u0183\u0184",
|
||||
"\u0007@\u0002\u0002\u0184]\u0003\u0002\u0002\u0002\u0185\u0186\u0007",
|
||||
"b\u0002\u0002\u0186\u0187\u0007b\u0002\u0002\u0187\u0188\u0007b\u0002",
|
||||
"\u0002\u0188\u018c\u0003\u0002\u0002\u0002\u0189\u018b\u000b\u0002\u0002",
|
||||
"\u0002\u018a\u0189\u0003\u0002\u0002\u0002\u018b\u018e\u0003\u0002\u0002",
|
||||
"\u0002\u018c\u018d\u0003\u0002\u0002\u0002\u018c\u018a\u0003\u0002\u0002",
|
||||
"\u0002\u018d\u018f\u0003\u0002\u0002\u0002\u018e\u018c\u0003\u0002\u0002",
|
||||
"\u0002\u018f\u0190\u0007b\u0002\u0002\u0190\u0191\u0007b\u0002\u0002",
|
||||
"\u0191\u0192\u0007b\u0002\u0002\u0192_\u0003\u0002\u0002\u0002\u0193",
|
||||
"\u0194\u0007,\u0002\u0002\u0194\u0195\u0007,\u0002\u0002\u0195\u0196",
|
||||
"\u0003\u0002\u0002\u0002\u0196\u0197\u0005&\u0011\u0002\u0197\u0198",
|
||||
"\u0005*\u0013\u0002\u0198\u0199\u0005$\u0010\u0002\u0199\u019a\u0005",
|
||||
" \u000e\u0002\u019a\u019b\u0005&\u0011\u0002\u019b\u019c\u0005.\u0015",
|
||||
"\u0002\u019c\u019d\u0005,\u0014\u0002\u019d\u019e\u0007<\u0002\u0002",
|
||||
"\u019e\u019f\u0007,\u0002\u0002\u019f\u01a0\u0007,\u0002\u0002\u01a0",
|
||||
"a\u0003\u0002\u0002\u0002\u01a1\u01a2\u000b\u0002\u0002\u0002\u01a2",
|
||||
"c\u0003\u0002\u0002\u0002\u01a3\u01a4\u0005D \u0002\u01a4\u01a5\u0003",
|
||||
"\u0002\u0002\u0002\u01a5\u01a6\b0\b\u0002\u01a6e\u0003\u0002\u0002\u0002",
|
||||
"\u01a7\u01a9\u0007\u000f\u0002\u0002\u01a8\u01a7\u0003\u0002\u0002\u0002",
|
||||
"\u01a8\u01a9\u0003\u0002\u0002\u0002\u01a9\u01aa\u0003\u0002\u0002\u0002",
|
||||
"\u01aa\u01ab\u0007\f\u0002\u0002\u01ab\u01ac\u0003\u0002\u0002\u0002",
|
||||
"\u01ac\u01ad\b1\t\u0002\u01ad\u01ae\b1\n\u0002\u01aeg\u0003\u0002\u0002",
|
||||
"\u0002\u01af\u01b0\u0007?\u0002\u0002\u01b0i\u0003\u0002\u0002\u0002",
|
||||
"\u01b1\u01b2\u0007.\u0002\u0002\u01b2k\u0003\u0002\u0002\u0002\u01b3",
|
||||
"\u01b4\u0005\u0016\t\u0002\u01b4\u01b5\u0005\b\u0002\u0002\u01b5\u01b6",
|
||||
"\u0005,\u0014\u0002\u01b6\u01b7\u0005*\u0013\u0002\u01b7\u01b8\u0005",
|
||||
"$\u0010\u0002\u01b8\u01b9\u0005\u001e\r\u0002\u01b9\u01bb\u0005\u0010",
|
||||
"\u0006\u0002\u01ba\u01bc\u0005,\u0014\u0002\u01bb\u01ba\u0003\u0002",
|
||||
"\u0002\u0002\u01bb\u01bc\u0003\u0002\u0002\u0002\u01bcm\u0003\u0002",
|
||||
"\u0002\u0002\u01bd\u01be\u00050\u0016\u0002\u01be\u01bf\u0005,\u0014",
|
||||
"\u0002\u01bf\u01c0\u0005\u0010\u0006\u0002\u01c0\u01c1\u0005,\u0014",
|
||||
"\u0002\u01c1\u01c2\u0005\u0012\u0007\u0002\u01c2\u01c3\u0005\u0010\u0006",
|
||||
"\u0002\u01c3\u01c4\u0005\b\u0002\u0002\u01c4\u01c5\u0005.\u0015\u0002",
|
||||
"\u01c5\u01c6\u00050\u0016\u0002\u01c6\u01c7\u0005*\u0013\u0002\u01c7",
|
||||
"\u01c9\u0005\u0010\u0006\u0002\u01c8\u01ca\u0005,\u0014\u0002\u01c9",
|
||||
"\u01c8\u0003\u0002\u0002\u0002\u01c9\u01ca\u0003\u0002\u0002\u0002\u01ca",
|
||||
"o\u0003\u0002\u0002\u0002\u01cb\u01cc\u0005,\u0014\u0002\u01cc\u01cd",
|
||||
"\u0005\u0018\n\u0002\u01cd\u01ce\u0005 \u000e\u0002\u01ce\u01cf\u0005",
|
||||
"&\u0011\u0002\u01cf\u01d0\u0005\u001e\r\u0002\u01d0\u01d1\u0005\u0010",
|
||||
"\u0006\u0002\u01d1\u0211\u0003\u0002\u0002\u0002\u01d2\u01d3\u0005\u001e",
|
||||
"\r\u0002\u01d3\u01d4\u0005\u0018\n\u0002\u01d4\u01d5\u0005,\u0014\u0002",
|
||||
"\u01d5\u01d6\u0005.\u0015\u0002\u01d6\u0211\u0003\u0002\u0002\u0002",
|
||||
"\u01d7\u01d8\u0005*\u0013\u0002\u01d8\u01d9\u0005\u0010\u0006\u0002",
|
||||
"\u01d9\u01da\u0005\u0014\b\u0002\u01da\u01db\u0005\u0010\u0006\u0002",
|
||||
"\u01db\u01dc\u00056\u0019\u0002\u01dc\u0211\u0003\u0002\u0002\u0002",
|
||||
"\u01dd\u01de\u0005&\u0011\u0002\u01de\u01df\u0005*\u0013\u0002\u01df",
|
||||
"\u01e0\u0005\u0010\u0006\u0002\u01e0\u01e1\u0005\n\u0003\u0002\u01e1",
|
||||
"\u01e2\u00050\u0016\u0002\u01e2\u01e3\u0005\u0018\n\u0002\u01e3\u01e4",
|
||||
"\u0005\u001e\r\u0002\u01e4\u01e5\u0005.\u0015\u0002\u01e5\u0211\u0003",
|
||||
"\u0002\u0002\u0002\u01e6\u01e7\u0005\f\u0004\u0002\u01e7\u01e8\u0005",
|
||||
"$\u0010\u0002\u01e8\u01e9\u0005 \u000e\u0002\u01e9\u01ea\u0005&\u0011",
|
||||
"\u0002\u01ea\u01eb\u0005$\u0010\u0002\u01eb\u01ec\u0005,\u0014\u0002",
|
||||
"\u01ec\u01ed\u0005\u0018\n\u0002\u01ed\u01ee\u0005.\u0015\u0002\u01ee",
|
||||
"\u01ef\u0005\u0010\u0006\u0002\u01ef\u0211\u0003\u0002\u0002\u0002\u01f0",
|
||||
"\u01f1\u0005 \u000e\u0002\u01f1\u01f2\u0005\u001e\r\u0002\u01f2\u0211",
|
||||
"\u0003\u0002\u0002\u0002\u01f3\u01f4\u0005&\u0011\u0002\u01f4\u01f5",
|
||||
"\u0005\b\u0002\u0002\u01f5\u01f6\u0005.\u0015\u0002\u01f6\u01f7\u0005",
|
||||
".\u0015\u0002\u01f7\u01f8\u0005\u0010\u0006\u0002\u01f8\u01f9\u0005",
|
||||
"*\u0013\u0002\u01f9\u01fa\u0005\"\u000f\u0002\u01fa\u01fb\u0005\b\u0002",
|
||||
"\u0002\u01fb\u01fc\u0005\"\u000f\u0002\u01fc\u01fd\u00058\u001a\u0002",
|
||||
"\u01fd\u0211\u0003\u0002\u0002\u0002\u01fe\u01ff\u0005&\u0011\u0002",
|
||||
"\u01ff\u0200\u0005\u0016\t\u0002\u0200\u0201\u0005*\u0013\u0002\u0201",
|
||||
"\u0202\u0005\b\u0002\u0002\u0202\u0203\u0005,\u0014\u0002\u0203\u0204",
|
||||
"\u0005\u0010\u0006\u0002\u0204\u0205\u0005\u001e\r\u0002\u0205\u0206",
|
||||
"\u0005\u0018\n\u0002\u0206\u0207\u0005,\u0014\u0002\u0207\u0208\u0005",
|
||||
".\u0015\u0002\u0208\u0211\u0003\u0002\u0002\u0002\u0209\u020a\u0005",
|
||||
"\u0018\n\u0002\u020a\u020b\u0005\"\u000f\u0002\u020b\u020c\u0005.\u0015",
|
||||
"\u0002\u020c\u020d\u0005\u0010\u0006\u0002\u020d\u020e\u0005\"\u000f",
|
||||
"\u0002\u020e\u020f\u0005.\u0015\u0002\u020f\u0211\u0003\u0002\u0002",
|
||||
"\u0002\u0210\u01cb\u0003\u0002\u0002\u0002\u0210\u01d2\u0003\u0002\u0002",
|
||||
"\u0002\u0210\u01d7\u0003\u0002\u0002\u0002\u0210\u01dd\u0003\u0002\u0002",
|
||||
"\u0002\u0210\u01e6\u0003\u0002\u0002\u0002\u0210\u01f0\u0003\u0002\u0002",
|
||||
"\u0002\u0210\u01f3\u0003\u0002\u0002\u0002\u0210\u01fe\u0003\u0002\u0002",
|
||||
"\u0002\u0210\u0209\u0003\u0002\u0002\u0002\u0211q\u0003\u0002\u0002",
|
||||
"\u0002\u0212\u0216\u0007*\u0002\u0002\u0213\u0215\n\u001f\u0002\u0002",
|
||||
"\u0214\u0213\u0003\u0002\u0002\u0002\u0215\u0218\u0003\u0002\u0002\u0002",
|
||||
"\u0216\u0214\u0003\u0002\u0002\u0002\u0216\u0217\u0003\u0002\u0002\u0002",
|
||||
"\u0217\u0219\u0003\u0002\u0002\u0002\u0218\u0216\u0003\u0002\u0002\u0002",
|
||||
"\u0219\u021a\u0007+\u0002\u0002\u021as\u0003\u0002\u0002\u0002\u021b",
|
||||
"\u021f\u0007]\u0002\u0002\u021c\u021e\n\"\u0002\u0002\u021d\u021c\u0003",
|
||||
"\u0002\u0002\u0002\u021e\u0221\u0003\u0002\u0002\u0002\u021f\u021d\u0003",
|
||||
"\u0002\u0002\u0002\u021f\u0220\u0003\u0002\u0002\u0002\u0220\u0222\u0003",
|
||||
"\u0002\u0002\u0002\u0221\u021f\u0003\u0002\u0002\u0002\u0222\u0223\u0007",
|
||||
"_\u0002\u0002\u0223u\u0003\u0002\u0002\u0002\u0224\u0228\u00071\u0002",
|
||||
"\u0002\u0225\u0227\n\u001f\u0002\u0002\u0226\u0225\u0003\u0002\u0002",
|
||||
"\u0002\u0227\u022a\u0003\u0002\u0002\u0002\u0228\u0226\u0003\u0002\u0002",
|
||||
"\u0002\u0228\u0229\u0003\u0002\u0002\u0002\u0229w\u0003\u0002\u0002",
|
||||
"\u0002\u022a\u0228\u0003\u0002\u0002\u0002\u022b\u022d\n#\u0002\u0002",
|
||||
"\u022c\u022b\u0003\u0002\u0002\u0002\u022d\u022e\u0003\u0002\u0002\u0002",
|
||||
"\u022e\u022c\u0003\u0002\u0002\u0002\u022e\u022f\u0003\u0002\u0002\u0002",
|
||||
"\u022fy\u0003\u0002\u0002\u0002\u0230\u0232\t$\u0002\u0002\u0231\u0233",
|
||||
"\n%\u0002\u0002\u0232\u0231\u0003\u0002\u0002\u0002\u0233\u0234\u0003",
|
||||
"\u0002\u0002\u0002\u0234\u0232\u0003\u0002\u0002\u0002\u0234\u0235\u0003",
|
||||
"\u0002\u0002\u0002\u0235\u0236\u0003\u0002\u0002\u0002\u0236\u0237\t",
|
||||
"$\u0002\u0002\u0237{\u0003\u0002\u0002\u0002\u0238\u0239\u0005D \u0002",
|
||||
"\u0239\u023a\u0003\u0002\u0002\u0002\u023a\u023b\b<\b\u0002\u023b}\u0003",
|
||||
"\u0002\u0002\u0002\u023c\u023d\u0007%\u0002\u0002\u023d\u023e\u0003",
|
||||
"\u0002\u0002\u0002\u023e\u023f\b=\u000b\u0002\u023f\u007f\u0003\u0002",
|
||||
"\u0002\u0002\u0240\u0242\u0007\u000f\u0002\u0002\u0241\u0240\u0003\u0002",
|
||||
"\u0002\u0002\u0241\u0242\u0003\u0002\u0002\u0002\u0242\u0243\u0003\u0002",
|
||||
"\u0002\u0002\u0243\u0244\u0007\f\u0002\u0002\u0244\u0245\u0003\u0002",
|
||||
"\u0002\u0002\u0245\u0246\b>\u0002\u0002\u0246\u0247\b>\n\u0002\u0247",
|
||||
"\u0081\u0003\u0002\u0002\u0002\u0248\u024c\u0005<\u001c\u0002\u0249",
|
||||
"\u024c\u0005>\u001d\u0002\u024a\u024c\u0007a\u0002\u0002\u024b\u0248",
|
||||
"\u0003\u0002\u0002\u0002\u024b\u0249\u0003\u0002\u0002\u0002\u024b\u024a",
|
||||
"\u0003\u0002\u0002\u0002\u024c\u0252\u0003\u0002\u0002\u0002\u024d\u0251",
|
||||
"\u0005<\u001c\u0002\u024e\u0251\u0005>\u001d\u0002\u024f\u0251\t&\u0002",
|
||||
"\u0002\u0250\u024d\u0003\u0002\u0002\u0002\u0250\u024e\u0003\u0002\u0002",
|
||||
"\u0002\u0250\u024f\u0003\u0002\u0002\u0002\u0251\u0254\u0003\u0002\u0002",
|
||||
"\u0002\u0252\u0250\u0003\u0002\u0002\u0002\u0252\u0253\u0003\u0002\u0002",
|
||||
"\u0002\u0253\u0083\u0003\u0002\u0002\u0002\u0254\u0252\u0003\u0002\u0002",
|
||||
"\u0002\u0255\u0256\u00070\u0002\u0002\u0256\u0085\u0003\u0002\u0002",
|
||||
"\u0002\u0257\u0258\u0005D \u0002\u0258\u0259\u0003\u0002\u0002\u0002",
|
||||
"\u0259\u025a\bA\b\u0002\u025a\u0087\u0003\u0002\u0002\u0002\u025b\u025d",
|
||||
"\u0007\u000f\u0002\u0002\u025c\u025b\u0003\u0002\u0002\u0002\u025c\u025d",
|
||||
"\u0003\u0002\u0002\u0002\u025d\u025e\u0003\u0002\u0002\u0002\u025e\u025f",
|
||||
"\u0007\f\u0002\u0002\u025f\u0260\u0003\u0002\u0002\u0002\u0260\u0261",
|
||||
"\bB\t\u0002\u0261\u0262\bB\n\u0002\u0262\u0089\u0003\u0002\u0002\u0002",
|
||||
"\u0263\u0265\u0007^\u0002\u0002\u0264\u0266\n\u001f\u0002\u0002\u0265",
|
||||
"\u0264\u0003\u0002\u0002\u0002\u0265\u0266\u0003\u0002\u0002\u0002\u0266",
|
||||
"\u008b\u0003\u0002\u0002\u0002\u0267\u0273\u0007}\u0002\u0002\u0268",
|
||||
"\u0272\n\'\u0002\u0002\u0269\u026d\u0007}\u0002\u0002\u026a\u026c\n",
|
||||
"\u001f\u0002\u0002\u026b\u026a\u0003\u0002\u0002\u0002\u026c\u026f\u0003",
|
||||
"\u0002\u0002\u0002\u026d\u026b\u0003\u0002\u0002\u0002\u026d\u026e\u0003",
|
||||
"\u0002\u0002\u0002\u026e\u0270\u0003\u0002\u0002\u0002\u026f\u026d\u0003",
|
||||
"\u0002\u0002\u0002\u0270\u0272\u0007\u007f\u0002\u0002\u0271\u0268\u0003",
|
||||
"\u0002\u0002\u0002\u0271\u0269\u0003\u0002\u0002\u0002\u0272\u0275\u0003",
|
||||
"\u0002\u0002\u0002\u0273\u0271\u0003\u0002\u0002\u0002\u0273\u0274\u0003",
|
||||
"\u0002\u0002\u0002\u0274\u0276\u0003\u0002\u0002\u0002\u0275\u0273\u0003",
|
||||
"\u0002\u0002\u0002\u0276\u0277\u0007\u007f\u0002\u0002\u0277\u008d\u0003",
|
||||
"\u0002\u0002\u0002\u0278\u027a\n(\u0002\u0002\u0279\u0278\u0003\u0002",
|
||||
"\u0002\u0002\u027a\u027b\u0003\u0002\u0002\u0002\u027b\u027c\u0003\u0002",
|
||||
"\u0002\u0002\u027b\u0279\u0003\u0002\u0002\u0002\u027c\u008f\u0003\u0002",
|
||||
"\u0002\u0002\u027d\u027e\u0005D \u0002\u027e\u027f\u0003\u0002\u0002",
|
||||
"\u0002\u027f\u0280\bF\b\u0002\u0280\u0091\u0003\u0002\u0002\u0002\u0281",
|
||||
"\u0283\u0007\u000f\u0002\u0002\u0282\u0281\u0003\u0002\u0002\u0002\u0282",
|
||||
"\u0283\u0003\u0002\u0002\u0002\u0283\u0284\u0003\u0002\u0002\u0002\u0284",
|
||||
"\u0285\u0007\f\u0002\u0002\u0285\u0286\u0003\u0002\u0002\u0002\u0286",
|
||||
"\u0287\bG\u0002\u0002\u0287\u0288\bG\n\u0002\u0288\u0093\u0003\u0002",
|
||||
"\u0002\u0002\u0289\u028d\u0007]\u0002\u0002\u028a\u028c\n\"\u0002\u0002",
|
||||
"\u028b\u028a\u0003\u0002\u0002\u0002\u028c\u028f\u0003\u0002\u0002\u0002",
|
||||
"\u028d\u028b\u0003\u0002\u0002\u0002\u028d\u028e\u0003\u0002\u0002\u0002",
|
||||
"\u028e\u0290\u0003\u0002\u0002\u0002\u028f\u028d\u0003\u0002\u0002\u0002",
|
||||
"\u0290\u0291\u0007_\u0002\u0002\u0291\u0095\u0003\u0002\u0002\u0002",
|
||||
"\u0292\u0296\u00071\u0002\u0002\u0293\u0295\n\u001f\u0002\u0002\u0294",
|
||||
"\u0293\u0003\u0002\u0002\u0002\u0295\u0298\u0003\u0002\u0002\u0002\u0296",
|
||||
"\u0294\u0003\u0002\u0002\u0002\u0296\u0297\u0003\u0002\u0002\u0002\u0297",
|
||||
"\u0097\u0003\u0002\u0002\u0002\u0298\u0296\u0003\u0002\u0002\u0002\u0299",
|
||||
"\u029b\n)\u0002\u0002\u029a\u0299\u0003\u0002\u0002\u0002\u029b\u029c",
|
||||
"\u0003\u0002\u0002\u0002\u029c\u029a\u0003\u0002\u0002\u0002\u029c\u029d",
|
||||
"\u0003\u0002\u0002\u0002\u029d\u0099\u0003\u0002\u0002\u0002\u029e\u029f",
|
||||
"\u0007<\u0002\u0002\u029f\u009b\u0003\u0002\u0002\u0002\u02a0\u02a2",
|
||||
"\u0007\u000f\u0002\u0002\u02a1\u02a0\u0003\u0002\u0002\u0002\u02a1\u02a2",
|
||||
"\u0003\u0002\u0002\u0002\u02a2\u02a3\u0003\u0002\u0002\u0002\u02a3\u02a4",
|
||||
"\u0007\f\u0002\u0002\u02a4\u02a5\u0003\u0002\u0002\u0002\u02a5\u02a6",
|
||||
"\bL\u0002\u0002\u02a6\u02a7\bL\n\u0002\u02a7\u009d\u0003\u0002\u0002",
|
||||
"\u0002\u02a8\u02aa\n*\u0002\u0002\u02a9\u02a8\u0003\u0002\u0002\u0002",
|
||||
"\u02aa\u02ab\u0003\u0002\u0002\u0002\u02ab\u02a9\u0003\u0002\u0002\u0002",
|
||||
"\u02ab\u02ac\u0003\u0002\u0002\u0002\u02ac\u009f\u0003\u0002\u0002\u0002",
|
||||
"2\u0002\u0003\u0004\u0005\u0006\u0007\u00df\u00e2\u00eb\u00f2\u010c",
|
||||
"\u0113\u0118\u011f\u0128\u012d\u0134\u013c\u0141\u015c\u0164\u017d\u018c",
|
||||
"\u01a8\u01bb\u01c9\u0210\u0216\u021f\u0228\u022e\u0234\u0241\u024b\u0250",
|
||||
"\u0252\u025c\u0265\u026d\u0271\u0273\u027b\u0282\u028d\u0296\u029c\u02a1",
|
||||
"\u02ab\f\b\u0002\u0002\u0007\u0007\u0002\u0007\u0004\u0002\u0007\u0005",
|
||||
"\u0002\u0007\u0006\u0002\u0007\u0003\u0002\t\u0003\u0002\t\u0004\u0002",
|
||||
"\u0006\u0002\u0002\t\t\u0002"].join("");
|
||||
|
||||
|
||||
var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN);
|
||||
|
||||
var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new antlr4.dfa.DFA(ds, index); });
|
||||
|
||||
function LUFileLexer(input) {
|
||||
antlr4.Lexer.call(this, input);
|
||||
this._interp = new antlr4.atn.LexerATNSimulator(this, atn, decisionsToDFA, new antlr4.PredictionContextCache());
|
||||
return this;
|
||||
}
|
||||
|
||||
LUFileLexer.prototype = Object.create(antlr4.Lexer.prototype);
|
||||
LUFileLexer.prototype.constructor = LUFileLexer;
|
||||
|
||||
Object.defineProperty(LUFileLexer.prototype, "atn", {
|
||||
get : function() {
|
||||
return atn;
|
||||
}
|
||||
});
|
||||
|
||||
LUFileLexer.EOF = antlr4.Token.EOF;
|
||||
LUFileLexer.WS = 1;
|
||||
LUFileLexer.NEWLINE = 2;
|
||||
LUFileLexer.QNA_SOURCE_INFO = 3;
|
||||
LUFileLexer.MODEL_INFO = 4;
|
||||
LUFileLexer.COMMENT = 5;
|
||||
LUFileLexer.QNA = 6;
|
||||
LUFileLexer.HASH = 7;
|
||||
LUFileLexer.DASH = 8;
|
||||
LUFileLexer.DOLLAR = 9;
|
||||
LUFileLexer.AT = 10;
|
||||
LUFileLexer.IMPORT = 11;
|
||||
LUFileLexer.FILTER_MARK = 12;
|
||||
LUFileLexer.QNA_ID_MARK = 13;
|
||||
LUFileLexer.MULTI_LINE_TEXT = 14;
|
||||
LUFileLexer.PROMPT_MARK = 15;
|
||||
LUFileLexer.INVALID_TOKEN_DEFAULT_MODE = 16;
|
||||
LUFileLexer.EQUAL = 17;
|
||||
LUFileLexer.COMMA = 18;
|
||||
LUFileLexer.HAS_ROLES_LABEL = 19;
|
||||
LUFileLexer.HAS_FEATURES_LABEL = 20;
|
||||
LUFileLexer.NEW_ENTITY_TYPE_IDENTIFIER = 21;
|
||||
LUFileLexer.PHRASE_LIST_LABEL = 22;
|
||||
LUFileLexer.NEW_COMPOSITE_ENTITY = 23;
|
||||
LUFileLexer.NEW_REGEX_ENTITY = 24;
|
||||
LUFileLexer.NEW_ENTITY_IDENTIFIER = 25;
|
||||
LUFileLexer.NEW_ENTITY_IDENTIFIER_WITH_WS = 26;
|
||||
LUFileLexer.NEWLINE_IN_NAME = 27;
|
||||
LUFileLexer.IDENTIFIER = 28;
|
||||
LUFileLexer.DOT = 29;
|
||||
LUFileLexer.ESCAPE_CHARACTER = 30;
|
||||
LUFileLexer.EXPRESSION = 31;
|
||||
LUFileLexer.TEXT = 32;
|
||||
LUFileLexer.NEWLINE_IN_ENTITY = 33;
|
||||
LUFileLexer.COMPOSITE_ENTITY = 34;
|
||||
LUFileLexer.REGEX_ENTITY = 35;
|
||||
LUFileLexer.ENTITY_TEXT = 36;
|
||||
LUFileLexer.COLON_MARK = 37;
|
||||
LUFileLexer.NEWLINE_IN_QNA = 38;
|
||||
LUFileLexer.QNA_TEXT = 39;
|
||||
|
||||
LUFileLexer.NEW_ENTITY_MODE = 1;
|
||||
LUFileLexer.INTENT_NAME_MODE = 2;
|
||||
LUFileLexer.LIST_BODY_MODE = 3;
|
||||
LUFileLexer.ENTITY_MODE = 4;
|
||||
LUFileLexer.QNA_MODE = 5;
|
||||
|
||||
LUFileLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
|
||||
LUFileLexer.prototype.modeNames = [ "DEFAULT_MODE", "NEW_ENTITY_MODE", "INTENT_NAME_MODE",
|
||||
"LIST_BODY_MODE", "ENTITY_MODE", "QNA_MODE" ];
|
||||
|
||||
LUFileLexer.prototype.literalNames = [ null, null, null, null, null, null,
|
||||
null, null, null, "'$'", "'@'", null,
|
||||
null, null, null, null, null, "'='",
|
||||
"','", null, null, null, null, null,
|
||||
null, null, null, null, null, "'.'",
|
||||
null, null, null, null, null, null,
|
||||
null, "':'" ];
|
||||
|
||||
LUFileLexer.prototype.symbolicNames = [ null, "WS", "NEWLINE", "QNA_SOURCE_INFO",
|
||||
"MODEL_INFO", "COMMENT", "QNA",
|
||||
"HASH", "DASH", "DOLLAR", "AT",
|
||||
"IMPORT", "FILTER_MARK", "QNA_ID_MARK",
|
||||
"MULTI_LINE_TEXT", "PROMPT_MARK",
|
||||
"INVALID_TOKEN_DEFAULT_MODE", "EQUAL",
|
||||
"COMMA", "HAS_ROLES_LABEL", "HAS_FEATURES_LABEL",
|
||||
"NEW_ENTITY_TYPE_IDENTIFIER", "PHRASE_LIST_LABEL",
|
||||
"NEW_COMPOSITE_ENTITY", "NEW_REGEX_ENTITY",
|
||||
"NEW_ENTITY_IDENTIFIER", "NEW_ENTITY_IDENTIFIER_WITH_WS",
|
||||
"NEWLINE_IN_NAME", "IDENTIFIER",
|
||||
"DOT", "ESCAPE_CHARACTER", "EXPRESSION",
|
||||
"TEXT", "NEWLINE_IN_ENTITY", "COMPOSITE_ENTITY",
|
||||
"REGEX_ENTITY", "ENTITY_TEXT", "COLON_MARK",
|
||||
"NEWLINE_IN_QNA", "QNA_TEXT" ];
|
||||
|
||||
LUFileLexer.prototype.ruleNames = [ "A", "B", "C", "D", "E", "F", "G", "H",
|
||||
"I", "J", "K", "L", "M", "N", "O", "P",
|
||||
"Q", "R", "S", "T", "U", "V", "W", "X",
|
||||
"Y", "Z", "LETTER", "NUMBER", "WHITESPACE",
|
||||
"UTTERANCE_MARK", "WS", "NEWLINE", "QNA_SOURCE_INFO",
|
||||
"MODEL_INFO", "COMMENT", "QNA", "HASH",
|
||||
"DASH", "DOLLAR", "AT", "IMPORT", "FILTER_MARK",
|
||||
"QNA_ID_MARK", "MULTI_LINE_TEXT", "PROMPT_MARK",
|
||||
"INVALID_TOKEN_DEFAULT_MODE", "WS_IN_NEW_ENTITY",
|
||||
"NEWLINE_IN_NEW_ENTITY", "EQUAL", "COMMA",
|
||||
"HAS_ROLES_LABEL", "HAS_FEATURES_LABEL",
|
||||
"NEW_ENTITY_TYPE_IDENTIFIER", "PHRASE_LIST_LABEL",
|
||||
"NEW_COMPOSITE_ENTITY", "NEW_REGEX_ENTITY",
|
||||
"NEW_ENTITY_IDENTIFIER", "NEW_ENTITY_IDENTIFIER_WITH_WS",
|
||||
"WS_IN_NAME", "HASH_IN_NAME", "NEWLINE_IN_NAME",
|
||||
"IDENTIFIER", "DOT", "WS_IN_LIST_BODY",
|
||||
"NEWLINE_IN_LIST_BODY", "ESCAPE_CHARACTER",
|
||||
"EXPRESSION", "TEXT", "WS_IN_ENTITY",
|
||||
"NEWLINE_IN_ENTITY", "COMPOSITE_ENTITY",
|
||||
"REGEX_ENTITY", "ENTITY_TEXT", "COLON_MARK",
|
||||
"NEWLINE_IN_QNA", "QNA_TEXT" ];
|
||||
|
||||
LUFileLexer.prototype.grammarFileName = "LUFileLexer.g4";
|
||||
|
||||
|
||||
exports.LUFileLexer = LUFileLexer;
|
||||
|
|
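Since everything above is generated plumbing, here is a short sketch (not from the commit; the require path is an assumption) of exercising the generated lexer on its own with the antlr4 runtime. Token type numbers resolve through the LUFileLexer.* constants and symbolicNames table defined above:

// Sketch only: tokenize a small .lu snippet and print symbolic token names.
const antlr4 = require('antlr4/index');
const LUFileLexer = require('./LUFileLexer').LUFileLexer;

const chars = new antlr4.InputStream('# Greeting\n- hello there\n');
const lexer = new LUFileLexer(chars);
const tokenStream = new antlr4.CommonTokenStream(lexer);
tokenStream.fill();

for (const token of tokenStream.tokens) {
    // symbolicNames is indexed by token type; entry 0 is reserved (null),
    // and EOF (type -1) falls through to the placeholder below.
    const name = LUFileLexer.prototype.symbolicNames[token.type] || '<literal or EOF>';
    console.log(`${name}: ${JSON.stringify(token.text)}`);
}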
@ -0,0 +1,45 @@
WS=1
NEWLINE=2
QNA_SOURCE_INFO=3
MODEL_INFO=4
COMMENT=5
QNA=6
HASH=7
DASH=8
DOLLAR=9
AT=10
IMPORT=11
FILTER_MARK=12
QNA_ID_MARK=13
MULTI_LINE_TEXT=14
PROMPT_MARK=15
INVALID_TOKEN_DEFAULT_MODE=16
EQUAL=17
COMMA=18
HAS_ROLES_LABEL=19
HAS_FEATURES_LABEL=20
NEW_ENTITY_TYPE_IDENTIFIER=21
PHRASE_LIST_LABEL=22
NEW_COMPOSITE_ENTITY=23
NEW_REGEX_ENTITY=24
NEW_ENTITY_IDENTIFIER=25
NEW_ENTITY_IDENTIFIER_WITH_WS=26
NEWLINE_IN_NAME=27
IDENTIFIER=28
DOT=29
ESCAPE_CHARACTER=30
EXPRESSION=31
TEXT=32
NEWLINE_IN_ENTITY=33
COMPOSITE_ENTITY=34
REGEX_ENTITY=35
ENTITY_TEXT=36
COLON_MARK=37
NEWLINE_IN_QNA=38
QNA_TEXT=39
'$'=9
'@'=10
'='=17
','=18
'.'=29
':'=37
File diff suppressed because one or more lines are too long
The diff between the files is not shown because it is too large
|
@ -0,0 +1,45 @@
WS=1
NEWLINE=2
QNA_SOURCE_INFO=3
MODEL_INFO=4
COMMENT=5
QNA=6
HASH=7
DASH=8
DOLLAR=9
AT=10
IMPORT=11
FILTER_MARK=12
QNA_ID_MARK=13
MULTI_LINE_TEXT=14
PROMPT_MARK=15
INVALID_TOKEN_DEFAULT_MODE=16
EQUAL=17
COMMA=18
HAS_ROLES_LABEL=19
HAS_FEATURES_LABEL=20
NEW_ENTITY_TYPE_IDENTIFIER=21
PHRASE_LIST_LABEL=22
NEW_COMPOSITE_ENTITY=23
NEW_REGEX_ENTITY=24
NEW_ENTITY_IDENTIFIER=25
NEW_ENTITY_IDENTIFIER_WITH_WS=26
NEWLINE_IN_NAME=27
IDENTIFIER=28
DOT=29
ESCAPE_CHARACTER=30
EXPRESSION=31
TEXT=32
NEWLINE_IN_ENTITY=33
COMPOSITE_ENTITY=34
REGEX_ENTITY=35
ENTITY_TEXT=36
COLON_MARK=37
NEWLINE_IN_QNA=38
QNA_TEXT=39
'$'=9
'@'=10
'='=17
','=18
'.'=29
':'=37
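The two .tokens files above (presumably LUFileLexer.tokens and LUFileParser.tokens; the viewer hides the file names) carry the same name-to-type mapping in ANTLR's `name=type` format, with quoted literals mapped alongside symbolic names. A hypothetical helper, not part of the commit, for loading such a file into a lookup map:

// Sketch only: parse an ANTLR .tokens file into a Map of name -> numeric type.
const fs = require('fs');

function readTokensFile(path) {
    const map = new Map();
    for (const line of fs.readFileSync(path, 'utf8').split(/\r?\n/)) {
        // use the last '=' so quoted literals like '='=17 parse correctly
        const idx = line.lastIndexOf('=');
        if (idx > 0) {
            map.set(line.slice(0, idx), Number(line.slice(idx + 1)));
        }
    }
    return map;
}

// e.g. readTokensFile('LUFileLexer.tokens').get('DOLLAR') === 9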
@ -0,0 +1,528 @@
|
|||
// Generated from ../LUFileParser.g4 by ANTLR 4.8
|
||||
// jshint ignore: start
|
||||
var antlr4 = require('antlr4/index');
|
||||
|
||||
// This class defines a complete listener for a parse tree produced by LUFileParser.
|
||||
function LUFileParserListener() {
|
||||
antlr4.tree.ParseTreeListener.call(this);
|
||||
return this;
|
||||
}
|
||||
|
||||
LUFileParserListener.prototype = Object.create(antlr4.tree.ParseTreeListener.prototype);
|
||||
LUFileParserListener.prototype.constructor = LUFileParserListener;
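// Usage sketch (editorial comment, not ANTLR output): a concrete listener
// overrides only the callbacks it needs and is driven by the runtime's tree
// walker, e.g.:
//
//   function EntityNameCollector() { LUFileParserListener.call(this); this.names = []; }
//   EntityNameCollector.prototype = Object.create(LUFileParserListener.prototype);
//   EntityNameCollector.prototype.constructor = EntityNameCollector;
//   EntityNameCollector.prototype.enterEntityName = function(ctx) { this.names.push(ctx.getText()); };
//
//   const collector = new EntityNameCollector();
//   antlr4.tree.ParseTreeWalker.DEFAULT.walk(collector, parser.file());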
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#file.
|
||||
LUFileParserListener.prototype.enterFile = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#file.
|
||||
LUFileParserListener.prototype.exitFile = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#paragraph.
|
||||
LUFileParserListener.prototype.enterParagraph = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#paragraph.
|
||||
LUFileParserListener.prototype.exitParagraph = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newline.
|
||||
LUFileParserListener.prototype.enterNewline = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newline.
|
||||
LUFileParserListener.prototype.exitNewline = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#errorString.
|
||||
LUFileParserListener.prototype.enterErrorString = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#errorString.
|
||||
LUFileParserListener.prototype.exitErrorString = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#nestedIntentSection.
|
||||
LUFileParserListener.prototype.enterNestedIntentSection = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#nestedIntentSection.
|
||||
LUFileParserListener.prototype.exitNestedIntentSection = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#nestedIntentNameLine.
|
||||
LUFileParserListener.prototype.enterNestedIntentNameLine = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#nestedIntentNameLine.
|
||||
LUFileParserListener.prototype.exitNestedIntentNameLine = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#nestedIntentName.
|
||||
LUFileParserListener.prototype.enterNestedIntentName = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#nestedIntentName.
|
||||
LUFileParserListener.prototype.exitNestedIntentName = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#nameIdentifier.
|
||||
LUFileParserListener.prototype.enterNameIdentifier = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#nameIdentifier.
|
||||
LUFileParserListener.prototype.exitNameIdentifier = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#nestedIntentBodyDefinition.
|
||||
LUFileParserListener.prototype.enterNestedIntentBodyDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#nestedIntentBodyDefinition.
|
||||
LUFileParserListener.prototype.exitNestedIntentBodyDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#subIntentDefinition.
|
||||
LUFileParserListener.prototype.enterSubIntentDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#subIntentDefinition.
|
||||
LUFileParserListener.prototype.exitSubIntentDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#simpleIntentSection.
|
||||
LUFileParserListener.prototype.enterSimpleIntentSection = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#simpleIntentSection.
|
||||
LUFileParserListener.prototype.exitSimpleIntentSection = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#intentDefinition.
|
||||
LUFileParserListener.prototype.enterIntentDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#intentDefinition.
|
||||
LUFileParserListener.prototype.exitIntentDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#intentNameLine.
|
||||
LUFileParserListener.prototype.enterIntentNameLine = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#intentNameLine.
|
||||
LUFileParserListener.prototype.exitIntentNameLine = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#intentName.
|
||||
LUFileParserListener.prototype.enterIntentName = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#intentName.
|
||||
LUFileParserListener.prototype.exitIntentName = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#intentBody.
|
||||
LUFileParserListener.prototype.enterIntentBody = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#intentBody.
|
||||
LUFileParserListener.prototype.exitIntentBody = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#normalIntentBody.
|
||||
LUFileParserListener.prototype.enterNormalIntentBody = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#normalIntentBody.
|
||||
LUFileParserListener.prototype.exitNormalIntentBody = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#normalIntentString.
|
||||
LUFileParserListener.prototype.enterNormalIntentString = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#normalIntentString.
|
||||
LUFileParserListener.prototype.exitNormalIntentString = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntitySection.
|
||||
LUFileParserListener.prototype.enterNewEntitySection = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntitySection.
|
||||
LUFileParserListener.prototype.exitNewEntitySection = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityDefinition.
|
||||
LUFileParserListener.prototype.enterNewEntityDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityDefinition.
|
||||
LUFileParserListener.prototype.exitNewEntityDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityListbody.
|
||||
LUFileParserListener.prototype.enterNewEntityListbody = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityListbody.
|
||||
LUFileParserListener.prototype.exitNewEntityListbody = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityLine.
|
||||
LUFileParserListener.prototype.enterNewEntityLine = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityLine.
|
||||
LUFileParserListener.prototype.exitNewEntityLine = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newCompositeDefinition.
|
||||
LUFileParserListener.prototype.enterNewCompositeDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newCompositeDefinition.
|
||||
LUFileParserListener.prototype.exitNewCompositeDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newRegexDefinition.
|
||||
LUFileParserListener.prototype.enterNewRegexDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newRegexDefinition.
|
||||
LUFileParserListener.prototype.exitNewRegexDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityType.
|
||||
LUFileParserListener.prototype.enterNewEntityType = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityType.
|
||||
LUFileParserListener.prototype.exitNewEntityType = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityRoles.
|
||||
LUFileParserListener.prototype.enterNewEntityRoles = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityRoles.
|
||||
LUFileParserListener.prototype.exitNewEntityRoles = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityUsesFeatures.
|
||||
LUFileParserListener.prototype.enterNewEntityUsesFeatures = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityUsesFeatures.
|
||||
LUFileParserListener.prototype.exitNewEntityUsesFeatures = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityRoleOrFeatures.
|
||||
LUFileParserListener.prototype.enterNewEntityRoleOrFeatures = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityRoleOrFeatures.
|
||||
LUFileParserListener.prototype.exitNewEntityRoleOrFeatures = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityName.
|
||||
LUFileParserListener.prototype.enterNewEntityName = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityName.
|
||||
LUFileParserListener.prototype.exitNewEntityName = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#newEntityNameWithWS.
|
||||
LUFileParserListener.prototype.enterNewEntityNameWithWS = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#newEntityNameWithWS.
|
||||
LUFileParserListener.prototype.exitNewEntityNameWithWS = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#entitySection.
|
||||
LUFileParserListener.prototype.enterEntitySection = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#entitySection.
|
||||
LUFileParserListener.prototype.exitEntitySection = function(ctx) {
|
||||
};
|
||||
|
||||
|
||||
// Enter a parse tree produced by LUFileParser#entityDefinition.
|
||||
LUFileParserListener.prototype.enterEntityDefinition = function(ctx) {
|
||||
};
|
||||
|
||||
// Exit a parse tree produced by LUFileParser#entityDefinition.
|
||||
LUFileParserListener.prototype.exitEntityDefinition = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#entityLine.
LUFileParserListener.prototype.enterEntityLine = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#entityLine.
LUFileParserListener.prototype.exitEntityLine = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#entityName.
LUFileParserListener.prototype.enterEntityName = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#entityName.
LUFileParserListener.prototype.exitEntityName = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#entityType.
LUFileParserListener.prototype.enterEntityType = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#entityType.
LUFileParserListener.prototype.exitEntityType = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#compositeEntityIdentifier.
LUFileParserListener.prototype.enterCompositeEntityIdentifier = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#compositeEntityIdentifier.
LUFileParserListener.prototype.exitCompositeEntityIdentifier = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#regexEntityIdentifier.
LUFileParserListener.prototype.enterRegexEntityIdentifier = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#regexEntityIdentifier.
LUFileParserListener.prototype.exitRegexEntityIdentifier = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#entityListBody.
LUFileParserListener.prototype.enterEntityListBody = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#entityListBody.
LUFileParserListener.prototype.exitEntityListBody = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#normalItemString.
LUFileParserListener.prototype.enterNormalItemString = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#normalItemString.
LUFileParserListener.prototype.exitNormalItemString = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#importSection.
LUFileParserListener.prototype.enterImportSection = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#importSection.
LUFileParserListener.prototype.exitImportSection = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#importDefinition.
LUFileParserListener.prototype.enterImportDefinition = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#importDefinition.
LUFileParserListener.prototype.exitImportDefinition = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaSection.
LUFileParserListener.prototype.enterQnaSection = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaSection.
LUFileParserListener.prototype.exitQnaSection = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaDefinition.
LUFileParserListener.prototype.enterQnaDefinition = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaDefinition.
LUFileParserListener.prototype.exitQnaDefinition = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaSourceInfo.
LUFileParserListener.prototype.enterQnaSourceInfo = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaSourceInfo.
LUFileParserListener.prototype.exitQnaSourceInfo = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaIdMark.
LUFileParserListener.prototype.enterQnaIdMark = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaIdMark.
LUFileParserListener.prototype.exitQnaIdMark = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaQuestion.
LUFileParserListener.prototype.enterQnaQuestion = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaQuestion.
LUFileParserListener.prototype.exitQnaQuestion = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#questionText.
LUFileParserListener.prototype.enterQuestionText = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#questionText.
LUFileParserListener.prototype.exitQuestionText = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#moreQuestionsBody.
LUFileParserListener.prototype.enterMoreQuestionsBody = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#moreQuestionsBody.
LUFileParserListener.prototype.exitMoreQuestionsBody = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#moreQuestion.
LUFileParserListener.prototype.enterMoreQuestion = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#moreQuestion.
LUFileParserListener.prototype.exitMoreQuestion = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#errorQuestionString.
LUFileParserListener.prototype.enterErrorQuestionString = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#errorQuestionString.
LUFileParserListener.prototype.exitErrorQuestionString = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#qnaAnswerBody.
LUFileParserListener.prototype.enterQnaAnswerBody = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#qnaAnswerBody.
LUFileParserListener.prototype.exitQnaAnswerBody = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#filterSection.
LUFileParserListener.prototype.enterFilterSection = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#filterSection.
LUFileParserListener.prototype.exitFilterSection = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#promptSection.
LUFileParserListener.prototype.enterPromptSection = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#promptSection.
LUFileParserListener.prototype.exitPromptSection = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#filterLine.
LUFileParserListener.prototype.enterFilterLine = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#filterLine.
LUFileParserListener.prototype.exitFilterLine = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#errorFilterLine.
LUFileParserListener.prototype.enterErrorFilterLine = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#errorFilterLine.
LUFileParserListener.prototype.exitErrorFilterLine = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#multiLineAnswer.
LUFileParserListener.prototype.enterMultiLineAnswer = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#multiLineAnswer.
LUFileParserListener.prototype.exitMultiLineAnswer = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#modelInfoSection.
LUFileParserListener.prototype.enterModelInfoSection = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#modelInfoSection.
LUFileParserListener.prototype.exitModelInfoSection = function(ctx) {
};

// Enter a parse tree produced by LUFileParser#modelInfoDefinition.
LUFileParserListener.prototype.enterModelInfoDefinition = function(ctx) {
};

// Exit a parse tree produced by LUFileParser#modelInfoDefinition.
LUFileParserListener.prototype.exitModelInfoDefinition = function(ctx) {
};


exports.LUFileParserListener = LUFileParserListener;

@ -0,0 +1,358 @@
// Generated from ../LUFileParser.g4 by ANTLR 4.8
// jshint ignore: start
var antlr4 = require('antlr4/index');

// This class defines a complete generic visitor for a parse tree produced by LUFileParser.

function LUFileParserVisitor() {
    antlr4.tree.ParseTreeVisitor.call(this);
    return this;
}

LUFileParserVisitor.prototype = Object.create(antlr4.tree.ParseTreeVisitor.prototype);
LUFileParserVisitor.prototype.constructor = LUFileParserVisitor;

// Visit a parse tree produced by LUFileParser#file.
LUFileParserVisitor.prototype.visitFile = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#paragraph.
LUFileParserVisitor.prototype.visitParagraph = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newline.
LUFileParserVisitor.prototype.visitNewline = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#errorString.
LUFileParserVisitor.prototype.visitErrorString = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#nestedIntentSection.
LUFileParserVisitor.prototype.visitNestedIntentSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#nestedIntentNameLine.
LUFileParserVisitor.prototype.visitNestedIntentNameLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#nestedIntentName.
LUFileParserVisitor.prototype.visitNestedIntentName = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#nameIdentifier.
LUFileParserVisitor.prototype.visitNameIdentifier = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#nestedIntentBodyDefinition.
LUFileParserVisitor.prototype.visitNestedIntentBodyDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#subIntentDefinition.
LUFileParserVisitor.prototype.visitSubIntentDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#simpleIntentSection.
LUFileParserVisitor.prototype.visitSimpleIntentSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#intentDefinition.
LUFileParserVisitor.prototype.visitIntentDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#intentNameLine.
LUFileParserVisitor.prototype.visitIntentNameLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#intentName.
LUFileParserVisitor.prototype.visitIntentName = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#intentBody.
LUFileParserVisitor.prototype.visitIntentBody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#normalIntentBody.
LUFileParserVisitor.prototype.visitNormalIntentBody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#normalIntentString.
LUFileParserVisitor.prototype.visitNormalIntentString = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntitySection.
LUFileParserVisitor.prototype.visitNewEntitySection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityDefinition.
LUFileParserVisitor.prototype.visitNewEntityDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityListbody.
LUFileParserVisitor.prototype.visitNewEntityListbody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityLine.
LUFileParserVisitor.prototype.visitNewEntityLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newCompositeDefinition.
LUFileParserVisitor.prototype.visitNewCompositeDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newRegexDefinition.
LUFileParserVisitor.prototype.visitNewRegexDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityType.
LUFileParserVisitor.prototype.visitNewEntityType = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityRoles.
LUFileParserVisitor.prototype.visitNewEntityRoles = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityUsesFeatures.
LUFileParserVisitor.prototype.visitNewEntityUsesFeatures = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityRoleOrFeatures.
LUFileParserVisitor.prototype.visitNewEntityRoleOrFeatures = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityName.
LUFileParserVisitor.prototype.visitNewEntityName = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#newEntityNameWithWS.
LUFileParserVisitor.prototype.visitNewEntityNameWithWS = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entitySection.
LUFileParserVisitor.prototype.visitEntitySection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entityDefinition.
LUFileParserVisitor.prototype.visitEntityDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entityLine.
LUFileParserVisitor.prototype.visitEntityLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entityName.
LUFileParserVisitor.prototype.visitEntityName = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entityType.
LUFileParserVisitor.prototype.visitEntityType = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#compositeEntityIdentifier.
LUFileParserVisitor.prototype.visitCompositeEntityIdentifier = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#regexEntityIdentifier.
LUFileParserVisitor.prototype.visitRegexEntityIdentifier = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#entityListBody.
LUFileParserVisitor.prototype.visitEntityListBody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#normalItemString.
LUFileParserVisitor.prototype.visitNormalItemString = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#importSection.
LUFileParserVisitor.prototype.visitImportSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#importDefinition.
LUFileParserVisitor.prototype.visitImportDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaSection.
LUFileParserVisitor.prototype.visitQnaSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaDefinition.
LUFileParserVisitor.prototype.visitQnaDefinition = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaSourceInfo.
LUFileParserVisitor.prototype.visitQnaSourceInfo = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaIdMark.
LUFileParserVisitor.prototype.visitQnaIdMark = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaQuestion.
LUFileParserVisitor.prototype.visitQnaQuestion = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#questionText.
LUFileParserVisitor.prototype.visitQuestionText = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#moreQuestionsBody.
LUFileParserVisitor.prototype.visitMoreQuestionsBody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#moreQuestion.
LUFileParserVisitor.prototype.visitMoreQuestion = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#errorQuestionString.
LUFileParserVisitor.prototype.visitErrorQuestionString = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#qnaAnswerBody.
LUFileParserVisitor.prototype.visitQnaAnswerBody = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#filterSection.
LUFileParserVisitor.prototype.visitFilterSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#promptSection.
LUFileParserVisitor.prototype.visitPromptSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#filterLine.
LUFileParserVisitor.prototype.visitFilterLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#errorFilterLine.
LUFileParserVisitor.prototype.visitErrorFilterLine = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#multiLineAnswer.
LUFileParserVisitor.prototype.visitMultiLineAnswer = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#modelInfoSection.
LUFileParserVisitor.prototype.visitModelInfoSection = function(ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by LUFileParser#modelInfoDefinition.
LUFileParserVisitor.prototype.visitModelInfoDefinition = function(ctx) {
    return this.visitChildren(ctx);
};


exports.LUFileParserVisitor = LUFileParserVisitor;

@ -0,0 +1,52 @@
const ImportSectionContext = require('./generated/LUFileParser').LUFileParser.ImportSectionContext;
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class ImportSection extends BaseSection {
    /**
     * @param {ImportSectionContext} parseTree
     */
    constructor(parseTree) {
        super();
        this.Errors = [];
        this.SectionType = LUSectionTypes.IMPORTSECTION;
        let result = this.ExtractDescriptionAndPath(parseTree);
        this.Description = result.description;
        this.Path = result.path;
        this.Id = `${this.SectionType}_${this.Path}`;
        const startPosition = new Position(parseTree.start.line, parseTree.start.column);
        const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
        this.Range = new Range(startPosition, stopPosition);
    }

    ExtractDescriptionAndPath(parseTree) {
        let importRegex = new RegExp(/\[([^\]]*)\]\(([^\)]*)\)/);
        let importStr = parseTree.importDefinition().IMPORT().getText();

        let description;
        let path;

        let groups = importStr.match(importRegex);
        if (groups && groups.length === 3) {
            description = groups[1].trim();
            path = groups[2].trim();
            if (path === undefined || path === '') {
                let errorMsg = `LU file reference path is empty: "${parseTree.getText()}"`;
                let error = BuildDiagnostic({
                    message: errorMsg,
                    context: parseTree
                });

                this.Errors.push(error);
            }
        }

        return { description, path };
    }
}

module.exports = ImportSection;

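// A standalone sanity check of the import regex used above (a sketch, not part
// of this commit; the sample import line '[common](./common.lu)' is made up):
const importRegex = /\[([^\]]*)\]\(([^\)]*)\)/;
const groups = '[common](./common.lu)'.match(importRegex);
console.log(groups[1]); // 'common'      -> becomes Description
console.log(groups[2]); // './common.lu' -> becomes Path (and part of the section Id)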
@ -0,0 +1,36 @@
const antlr4 = require('antlr4');
const Position = require('./diagnostic').Position;
const Range = require('./diagnostic').Range;
const Diagnostic = require('./diagnostic').Diagnostic;
const AntlrTokens = require('./diagnostic').AntlrTokens;

let LUErrorListener = function(errors) {
    antlr4.error.ErrorListener.call(this);
    this.errors = errors;
    return this;
}

LUErrorListener.prototype = Object.create(antlr4.error.ErrorListener.prototype);
LUErrorListener.prototype.constructor = LUErrorListener;
LUErrorListener.prototype.syntaxError = function(recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
    const startPosition = new Position(line, charPositionInLine);
    const stopPosition = new Position(line, charPositionInLine + offendingSymbol.stop - offendingSymbol.start + 1);
    const range = new Range(startPosition, stopPosition);
    msg = `syntax error: ` + msg;
    // ANTLR quotes the offending token and lists the expected token set in braces;
    // pull both out to build a friendlier message.
    const invalidToken = msg.match(/'([^']+)'/)[1];
    const expectedTokenStr = msg.substring(msg.indexOf('{') + 1, msg.lastIndexOf('}'));
    const expectedTokens = expectedTokenStr.split(',');
    if (expectedTokenStr.length > 0 && expectedTokens.length > 0) {
        msg = `syntax error: invalid input '${invalidToken}' detected. Expecting one of these: `;
        expectedTokens.forEach(token => {
            msg += AntlrTokens[token.trim()] + ', ';
        });

        msg = msg.substring(0, msg.lastIndexOf(', '));
    }

    const diagnostic = new Diagnostic(range, msg);
    this.errors.push(diagnostic);
}

module.exports = LUErrorListener;

@ -0,0 +1,434 @@
const antlr4 = require('antlr4');
const LUFileLexer = require('./generated/LUFileLexer').LUFileLexer;
const LUFileParser = require('./generated/LUFileParser').LUFileParser;
const FileContext = require('./generated/LUFileParser').LUFileParser.FileContext;
const LUResource = require('./luResource');
const NestedIntentSection = require('./nestedIntentSection');
const SimpleIntentSection = require('./simpleIntentSection');
const EntitySection = require('./entitySection');
const NewEntitySection = require('./newEntitySection');
const ImportSection = require('./importSection');
const QnaSection = require('./qnaSection');
const ModelInfoSection = require('./modelInfoSection');
const LUErrorListener = require('./luErrorListener');
const SectionType = require('./../utils/enums/lusectiontypes');
const DiagnosticSeverity = require('./diagnostic').DiagnosticSeverity;
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;
const NEWLINE = require('os').EOL;

class LUParser {

    /**
     * @param {string} text
     * @param {LUResource} luResource
     */
    static parseWithRef(text, luResource) {
        if (text === undefined || text === '') {
            return new LUResource([], '', []);
        }

        const sectionEnabled = luResource ? this.isSectionEnabled(luResource.Sections) : undefined;

        return this.parse(text, sectionEnabled);
    }

    /**
     * @param {string} text
     */
    static parse(text, sectionEnabled) {
        if (text === undefined || text === '') {
            return new LUResource([], '', []);
        }

        let { fileContent, errors } = this.getFileContent(text);

        return this.extractFileContent(fileContent, text, errors, sectionEnabled);
    }

    static extractFileContent(fileContent, content, errors, sectionEnabled) {
        let sections = [];
        try {
            let modelInfoSections = this.extractModelInfoSections(fileContent);
            modelInfoSections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(modelInfoSections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing model information: ${err.message}`
            }));
        }

        try {
            let isSectionEnabled = sectionEnabled === undefined ? this.isSectionEnabled(sections) : sectionEnabled;

            let nestedIntentSections = this.extractNestedIntentSections(fileContent, content);
            nestedIntentSections.forEach(section => errors = errors.concat(section.Errors));
            if (isSectionEnabled) {
                sections = sections.concat(nestedIntentSections);
            } else {
                nestedIntentSections.forEach(section => {
                    let emptyIntentSection = new SimpleIntentSection();
                    emptyIntentSection.Name = section.Name;
                    emptyIntentSection.Id = `${emptyIntentSection.SectionType}_${emptyIntentSection.Name}`;

                    // Get the end character index. This is a default value;
                    // it will be reset in extractSectionBody().
                    let endCharacter = section.Name.length + 2;

                    const range = new Range(section.Range.Start, new Position(section.Range.Start.Line, endCharacter));
                    emptyIntentSection.Range = range;
                    let errorMsg = `no utterances found for intent definition: "# ${emptyIntentSection.Name}"`;
                    let error = BuildDiagnostic({
                        message: errorMsg,
                        range: emptyIntentSection.Range,
                        severity: DiagnosticSeverity.WARN
                    });

                    errors.push(error);
                    sections.push(emptyIntentSection);

                    section.SimpleIntentSections.forEach(subSection => {
                        sections.push(subSection);
                        errors = errors.concat(subSection.Errors);
                    });
                });
            }
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing nested intent section: ${err.message}`
            }));
        }

        try {
            let simpleIntentSections = this.extractSimpleIntentSections(fileContent, content);
            simpleIntentSections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(simpleIntentSections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing simple intent section: ${err.message}`
            }));
        }

        try {
            let entitySections = this.extractEntitiesSections(fileContent);
            entitySections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(entitySections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing entities: ${err.message}`
            }));
        }

        try {
            let newEntitySections = this.extractNewEntitiesSections(fileContent);
            newEntitySections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(newEntitySections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing new entities: ${err.message}`
            }));
        }

        try {
            let importSections = this.extractImportSections(fileContent);
            importSections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(importSections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing import section: ${err.message}`
            }));
        }

        try {
            let qnaSections = this.extractQnaSections(fileContent);
            qnaSections.forEach(section => errors = errors.concat(section.Errors));
            sections = sections.concat(qnaSections);
        } catch (err) {
            errors.push(BuildDiagnostic({
                message: `Error happened when parsing qna section: ${err.message}`
            }));
        }

        sections = this.reconstractIntentSections(sections);

        this.extractSectionBody(sections, content);

        return new LUResource(sections, content, errors);
    }

    /**
     * @param {string} text
     */
    static getFileContent(text) {
        if (text === undefined
            || text === ''
            || text === null) {

            return undefined;
        }

        const chars = new antlr4.InputStream(text);
        const lexer = new LUFileLexer(chars);
        const tokens = new antlr4.CommonTokenStream(lexer);
        const parser = new LUFileParser(tokens);
        let errors = [];
        const listener = new LUErrorListener(errors);
        parser.removeErrorListeners();
        parser.addErrorListener(listener);
        parser.buildParseTrees = true;
        const fileContent = parser.file();

        return { fileContent, errors };
    }

    /**
     * @param {FileContext} fileContext
     * @param {string} content
     */
    static extractNestedIntentSections(fileContext, content) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let nestedIntentSections = fileContext.paragraph()
            .map(x => x.nestedIntentSection())
            .filter(x => x !== undefined && x !== null);

        let nestedIntentSectionList = nestedIntentSections.map(x => new NestedIntentSection(x, content));

        return nestedIntentSectionList;
    }

    /**
     * @param {FileContext} fileContext
     * @param {string} content
     */
    static extractSimpleIntentSections(fileContext, content) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let simpleIntentSections = fileContext.paragraph()
            .map(x => x.simpleIntentSection())
            .filter(x => x && x.intentDefinition());

        let simpleIntentSectionList = simpleIntentSections.map(x => new SimpleIntentSection(x, content));

        return simpleIntentSectionList;
    }

    /**
     * @param {FileContext} fileContext
     */
    static extractEntitiesSections(fileContext) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let entitySections = fileContext.paragraph()
            .map(x => x.entitySection())
            .filter(x => x && x.entityDefinition());

        let entitySectionList = entitySections.map(x => new EntitySection(x));

        return entitySectionList;
    }

    /**
     * @param {FileContext} fileContext
     */
    static extractNewEntitiesSections(fileContext) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let newEntitySections = fileContext.paragraph()
            .map(x => x.newEntitySection())
            .filter(x => x && x.newEntityDefinition());

        let newEntitySectionList = newEntitySections.map(x => new NewEntitySection(x));

        return newEntitySectionList;
    }

    /**
     * @param {FileContext} fileContext
     */
    static extractImportSections(fileContext) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let importSections = fileContext.paragraph()
            .map(x => x.importSection())
            .filter(x => x !== undefined && x !== null);

        let importSectionList = importSections.map(x => new ImportSection(x));

        return importSectionList;
    }

    /**
     * @param {FileContext} fileContext
     */
    static extractQnaSections(fileContext) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let qnaSections = fileContext.paragraph()
            .map(x => x.qnaSection())
            .filter(x => x !== undefined && x !== null);

        let qnaSectionList = qnaSections.map(x => new QnaSection(x));

        return qnaSectionList;
    }

    /**
     * @param {FileContext} fileContext
     */
    static extractModelInfoSections(fileContext) {
        if (fileContext === undefined
            || fileContext === null) {
            return [];
        }

        let modelInfoSections = fileContext.paragraph()
            .map(x => x.modelInfoSection())
            .filter(x => x !== undefined && x !== null);

        let modelInfoSectionList = modelInfoSections.map(x => new ModelInfoSection(x));

        return modelInfoSectionList;
    }

    /**
     * Reattach entity sections (and '##' sub-intents) to the intent section
     * that precedes them, so entities defined under an intent stay with it.
     * @param {any[]} sections
     */
    static reconstractIntentSections(sections) {
        let newSections = [];
        sections.sort((a, b) => a.Range.Start.Line - b.Range.Start.Line);
        let index;
        for (index = 0; index < sections.length; index++) {
            let section = sections[index];
            if (index + 1 === sections.length) {
                newSections.push(section);
                break;
            }

            if (section.SectionType === SectionType.NESTEDINTENTSECTION) {
                if (sections[index + 1].SectionType === SectionType.ENTITYSECTION
                    || sections[index + 1].SectionType === SectionType.NEWENTITYSECTION) {
                    let simpleIntentSections = section.SimpleIntentSections;
                    simpleIntentSections[simpleIntentSections.length - 1].Entities.push(sections[index + 1]);
                    simpleIntentSections[simpleIntentSections.length - 1].Errors.push(...sections[index + 1].Errors);
                    index++;

                    while (index + 1 < sections.length
                        && (sections[index + 1].SectionType === SectionType.ENTITYSECTION
                        || sections[index + 1].SectionType === SectionType.NEWENTITYSECTION
                        || (sections[index + 1].SectionType === SectionType.SIMPLEINTENTSECTION && sections[index + 1].IntentNameLine.includes('##')))) {
                        if (sections[index + 1].SectionType === SectionType.ENTITYSECTION
                            || sections[index + 1].SectionType === SectionType.NEWENTITYSECTION) {
                            simpleIntentSections[simpleIntentSections.length - 1].Entities.push(sections[index + 1]);
                            simpleIntentSections[simpleIntentSections.length - 1].Errors.push(...sections[index + 1].Errors);
                        } else {
                            simpleIntentSections.push(sections[index + 1]);
                        }

                        index++;
                    }

                    simpleIntentSections.forEach(s => section.Errors.push(...s.Errors));

                    section.SimpleIntentSections = simpleIntentSections;
                }
            } else if (section.SectionType === SectionType.SIMPLEINTENTSECTION) {
                while (index + 1 < sections.length && (sections[index + 1].SectionType === SectionType.ENTITYSECTION
                    || sections[index + 1].SectionType === SectionType.NEWENTITYSECTION)) {
                    section.Entities.push(sections[index + 1]);
                    section.Errors.push(...sections[index + 1].Errors);
                    index++;
                }
            }

            newSections.push(section);
        }

        return newSections;
    }

    /**
     * @param {any[]} sections
     * @param {string} content
     */
    static extractSectionBody(sections, content) {
        const originList = content.split(/\r?\n/);
        let qnaSectionIndex = 0;
        sections.forEach(function (section, index) {
            if (section.SectionType === SectionType.SIMPLEINTENTSECTION
                || section.SectionType === SectionType.NESTEDINTENTSECTION
                || section.SectionType === SectionType.QNASECTION) {
                const startLine = section.Range.Start.Line - 1;
                let stopLine;
                if (index + 1 < sections.length) {
                    stopLine = sections[index + 1].Range.Start.Line - 1;
                    if (isNaN(startLine) || isNaN(stopLine) || startLine < 0 || startLine > stopLine) {
                        throw new Error("index out of range.");
                    }
                } else {
                    stopLine = originList.length;
                }

                section.Range.End.Line = stopLine;
                section.Range.End.Character = originList[stopLine - 1].length;

                let destList;
                if (section.SectionType === SectionType.QNASECTION) {
                    destList = originList.slice(startLine, stopLine);
                    section.Id = qnaSectionIndex;
                    qnaSectionIndex++;
                } else {
                    destList = originList.slice(startLine + 1, stopLine);
                }

                section.Body = destList.join(NEWLINE);

                if (section.SectionType === SectionType.NESTEDINTENTSECTION) {
                    LUParser.extractSectionBody(section.SimpleIntentSections, originList.slice(0, stopLine).join(NEWLINE));
                }
            }
        });
    }

    static isSectionEnabled(sections) {
        let modelInfoSections = sections.filter(s => s.SectionType === SectionType.MODELINFOSECTION);
        let enableSections = false;
        if (modelInfoSections && modelInfoSections.length > 0) {
            for (const modelInfo of modelInfoSections) {
                let line = modelInfo.ModelInfo;
                let kvPair = line.split(/@(enableSections).(.*)=/g).map(item => item.trim());
                if (kvPair.length === 4) {
                    if (kvPair[1] === 'enableSections' && kvPair[3] === 'true') {
                        enableSections = true;
                        break;
                    }
                }
            }
        }

        return enableSections;
    }
}

module.exports = LUParser;

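// Usage sketch (not part of this commit): parse a small .lu snippet and
// inspect the resulting sections and diagnostics. The intent name and
// utterances are made-up examples.
const LUParser = require('./luParser');

const resource = LUParser.parse([
    '# Greeting',
    '- hi',
    '- hello'
].join('\n'));

console.log(resource.Sections.map(s => s.SectionType)); // one simple intent section expected
console.log(resource.Errors);                           // diagnostics collected while parsing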
@ -0,0 +1,14 @@
class LUResource {
    /**
     * @param {any[]} sections
     * @param {string} content
     * @param {any[]} errors
     */
    constructor(sections, content, errors) {
        this.Sections = sections || [];
        this.Content = content;
        this.Errors = errors || [];
    }
}

module.exports = LUResource;

@ -0,0 +1,24 @@
const ModelInfoSectionContext = require('./generated/LUFileParser').LUFileParser.ModelInfoSectionContext;
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class LUModelInfo extends BaseSection {
    /**
     * @param {ModelInfoSectionContext} parseTree
     */
    constructor(parseTree) {
        super();
        this.SectionType = LUSectionTypes.MODELINFOSECTION;
        this.ModelInfo = parseTree.modelInfoDefinition().getText();
        this.Errors = [];
        this.Id = `${this.SectionType}_${this.ModelInfo}`;
        const startPosition = new Position(parseTree.start.line, parseTree.start.column);
        const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
        this.Range = new Range(startPosition, stopPosition);
    }
}

module.exports = LUModelInfo;

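// Example of the model-info line this section wraps, matching the MODEL_INFO
// lexer rule ('>' WS* '!#' ...). This particular setting is the one
// luParser.isSectionEnabled() actually checks:
//
//   > !# @enableSections = true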
@ -0,0 +1,49 @@
const NestedIntentSectionContext = require('./generated/LUFileParser').LUFileParser.NestedIntentSectionContext;
const SimpleIntentSection = require('./simpleIntentSection');
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const NEWLINE = require('os').EOL;
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class NestedIntentSection extends BaseSection {
    /**
     * @param {NestedIntentSectionContext} parseTree
     * @param {string} content
     */
    constructor(parseTree, content) {
        super();
        this.SectionType = LUSectionTypes.NESTEDINTENTSECTION;
        this.Name = this.ExtractName(parseTree);
        this.Body = '';
        this.SimpleIntentSections = this.ExtractSimpleIntentSections(parseTree, content);
        this.Errors = [];
        if (this.SimpleIntentSections && this.SimpleIntentSections.length > 0) {
            this.SimpleIntentSections.forEach(section => {
                this.Errors = this.Errors.concat(section.Errors);
            });
        }

        this.Id = `${this.SectionType}_${this.Name}`;
        const startPosition = new Position(parseTree.start.line, parseTree.start.column);
        const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
        this.Range = new Range(startPosition, stopPosition);
    }

    ExtractName(parseTree) {
        return parseTree.nestedIntentNameLine().nestedIntentName().getText().trim();
    }

    ExtractSimpleIntentSections(parseTree, content) {
        let simpleIntentSections = [];
        for (const subIntentDefinition of parseTree.nestedIntentBodyDefinition().subIntentDefinition()) {
            let simpleIntentSection = new SimpleIntentSection(subIntentDefinition.simpleIntentSection(), content);
            simpleIntentSection.Range.Start.Character = 0;
            simpleIntentSections.push(simpleIntentSection);
        }

        return simpleIntentSections;
    }
}

module.exports = NestedIntentSection;

@ -0,0 +1,117 @@
const NewEntitySectionContext = require('./generated/LUFileParser').LUFileParser.NewEntitySectionContext;
const DiagnosticSeverity = require('./diagnostic').DiagnosticSeverity;
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const InvalidCharsInIntentOrEntityName = require('./../utils/enums/invalidchars').InvalidCharsInIntentOrEntityName;
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class NewEntitySection extends BaseSection {
    /**
     * @param {NewEntitySectionContext} parseTree
     */
    constructor(parseTree) {
        super();
        this.SectionType = LUSectionTypes.NEWENTITYSECTION;
        this.Errors = [];
        this.Name = this.ExtractName(parseTree);
        this.Type = this.ExtractType(parseTree);
        this.Roles = this.ExtractRoles(parseTree);
        this.Features = this.ExtractFeatures(parseTree);
        this.CompositeDefinition = this.ExtractCompositeDefinition(parseTree);
        this.RegexDefinition = this.ExtractRegexDefinition(parseTree);
        this.ListBody = this.ExtractSynonymsOrPhraseList(parseTree);
        this.Id = `${this.SectionType}_${this.Name}`;
        const startPosition = new Position(parseTree.start.line, parseTree.start.column);
        const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
        this.Range = new Range(startPosition, stopPosition);
    }

    ExtractName(parseTree) {
        let entityName;
        if (parseTree.newEntityDefinition().newEntityLine().newEntityName()) {
            entityName = parseTree.newEntityDefinition().newEntityLine().newEntityName().getText().trim();
        } else if (parseTree.newEntityDefinition().newEntityLine().newEntityNameWithWS()) {
            entityName = parseTree.newEntityDefinition().newEntityLine().newEntityNameWithWS().getText().trim();
        } else {
            this.Errors.push(BuildDiagnostic({
                message: "Invalid entity line, did you miss entity name after @",
                context: parseTree.newEntityDefinition().newEntityLine()
            }));
        }

        if (entityName && InvalidCharsInIntentOrEntityName.some(x => entityName.includes(x))) {
            this.Errors.push(BuildDiagnostic({
                message: `Invalid entity line, entity name ${entityName} cannot contain any of the following characters: [<, >, *, %, &, :, \\, $]`,
                context: parseTree.newEntityDefinition().newEntityLine()
            }));
        } else {
            return entityName;
        }
    }

    ExtractType(parseTree) {
        if (parseTree.newEntityDefinition().newEntityLine().newEntityType()) {
            return parseTree.newEntityDefinition().newEntityLine().newEntityType().getText().trim();
        }
    }

    ExtractRoles(parseTree) {
        if (parseTree.newEntityDefinition().newEntityLine().newEntityRoles()) {
            return parseTree.newEntityDefinition().newEntityLine().newEntityRoles().newEntityRoleOrFeatures().getText().trim();
        }
    }

    ExtractFeatures(parseTree) {
        if (parseTree.newEntityDefinition().newEntityLine().newEntityUsesFeatures()) {
            return parseTree.newEntityDefinition().newEntityLine().newEntityUsesFeatures().newEntityRoleOrFeatures().getText().trim();
        }
    }

    ExtractCompositeDefinition(parseTree) {
        if (parseTree.newEntityDefinition().newEntityLine().newCompositeDefinition()) {
            return parseTree.newEntityDefinition().newEntityLine().newCompositeDefinition().getText().trim();
        }
    }

    ExtractRegexDefinition(parseTree) {
        if (parseTree.newEntityDefinition().newEntityLine().newRegexDefinition()) {
            return parseTree.newEntityDefinition().newEntityLine().newRegexDefinition().getText().trim();
        }
    }

    ExtractSynonymsOrPhraseList(parseTree) {
        let synonymsOrPhraseList = [];

        if (parseTree.newEntityDefinition().newEntityListbody()) {
            for (const errorItemStr of parseTree.newEntityDefinition().newEntityListbody().errorString()) {
                if (errorItemStr.getText().trim() !== '') {
                    this.Errors.push(BuildDiagnostic({
                        message: "Invalid list entity line, did you miss '-' at line begin",
                        context: errorItemStr
                    }));
                }
            }

            for (const normalItemStr of parseTree.newEntityDefinition().newEntityListbody().normalItemString()) {
                synonymsOrPhraseList.push(normalItemStr.getText());
            }
        }

        if (this.Type && this.Type.indexOf('=') > -1 && synonymsOrPhraseList.length === 0) {
            // NewEntityDefinitionContext exposes newEntityLine(), as used above;
            // the original called the non-existent entityLine() here.
            let errorMsg = `no synonyms list found for list entity definition: "${parseTree.newEntityDefinition().newEntityLine().getText()}"`;
            let error = BuildDiagnostic({
                message: errorMsg,
                context: parseTree.newEntityDefinition().newEntityLine(),
                severity: DiagnosticSeverity.WARN
            });

            this.Errors.push(error);
        }

        return synonymsOrPhraseList;
    }
}

module.exports = NewEntitySection;

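// Rough examples of the @-entity lines this section models, based on the
// NEW_ENTITY_MODE lexer rules in this commit (entity names and values are
// made up, and the exact role/feature separators are an assumption):
//
//   @ ml userName hasRoles firstName, lastName
//   @ composite address = [number, street]
//   @ regex zipcode = /[0-9]{5}/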
The diff for one file is not shown because of its large size.
@ -0,0 +1,152 @@
const QnaSectionContext = require('./generated/LUFileParser').LUFileParser.QnaSectionContext;
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
const QNA_GENERIC_SOURCE = "custom editorial";
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class QnaSection extends BaseSection {
    /**
     * @param {QnaSectionContext} parseTree
     */
    constructor(parseTree) {
        super();
        this.SectionType = LUSectionTypes.QNASECTION;
        this.Questions = [this.ExtractQuestion(parseTree)];
        let result = this.ExtractMoreQuestions(parseTree);
        this.Questions = this.Questions.concat(result.questions);
        this.Errors = result.errors;
        result = this.ExtractFilterPairs(parseTree);
        this.FilterPairs = result.filterPairs;
        this.Errors = this.Errors.concat(result.errors);
        this.Answer = this.ExtractAnswer(parseTree);
        result = this.ExtractPrompts(parseTree);
        this.prompts = result.promptDefinitions;
        this.promptsText = result.promptTextList;
        this.Errors = this.Errors.concat(result.errors);
        this.QAPairId = this.ExtractAssignedId(parseTree);
        this.source = this.ExtractSourceInfo(parseTree);
        const startPosition = new Position(parseTree.start.line, parseTree.start.column);
        const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
        this.Range = new Range(startPosition, stopPosition);
    }

    ExtractSourceInfo(parseTree) {
        let srcAssignment = parseTree.qnaDefinition().qnaSourceInfo();
        if (srcAssignment) {
            let srcRegExp = /^[ ]*\>[ ]*!#[ ]*@qna.pair.source[ ]*=[ ]*(?<sourceInfo>.*?)$/gmi;
            let srcParsed = srcRegExp.exec(srcAssignment.getText().trim());
            return srcParsed.groups.sourceInfo || QNA_GENERIC_SOURCE;
        }

        return QNA_GENERIC_SOURCE;
    }

    ExtractAssignedId(parseTree) {
        let idAssignment = parseTree.qnaDefinition().qnaIdMark();
        if (idAssignment) {
            let idTextRegExp = /^\<a[ ]*id[ ]*=[ ]*[\"\'](?<idCaptured>.*?)[\"\'][ ]*>[ ]*\<\/a\>$/gmi;
            let idTextParsed = idTextRegExp.exec(idAssignment.getText().trim());
            return idTextParsed.groups.idCaptured || undefined;
        }

        return undefined;
    }

    ExtractPrompts(parseTree) {
        let promptDefinitions = [];
        let promptTextList = [];
        let errors = [];
        let promptSection = parseTree.qnaDefinition().promptSection();
        if (!promptSection) {
            return { promptDefinitions, promptTextList, errors };
        }

        if (promptSection.errorFilterLine() !== undefined) {
            for (const errorFilterLineStr of promptSection.errorFilterLine()) {
                if (errorFilterLineStr.getText().trim() !== '') {
                    errors.push(BuildDiagnostic({
                        message: "Invalid QnA prompt line, expecting '-' prefix for each line.",
                        context: errorFilterLineStr
                    }));
                }
            }
        }

        for (const promptLine of promptSection.filterLine()) {
            let filterLineText = promptLine.getText().trim();
            filterLineText = filterLineText.substr(1).trim();
            promptTextList.push(filterLineText);
            let promptConfigurationRegExp = /^\[(?<displayText>.*?)]\([ ]*\#[ ]*[ ?]*(?<linkedQuestion>.*?)\)[ ]*(?<contextOnly>\`context-only\`)?.*?$/gmi;
            let splitLine = promptConfigurationRegExp.exec(filterLineText);
            if (!splitLine) {
                errors.push(BuildDiagnostic({
                    message: "Invalid QnA prompt definition. Unable to parse prompt. Please verify syntax as well as the question link.",
                    context: filterLineText
                }));

                // Skip this line: splitLine is null, so it has no capture groups.
                continue;
            }

            promptDefinitions.push(splitLine.groups);
        }

        return { promptDefinitions, promptTextList, errors };
    }

    ExtractQuestion(parseTree) {
        return parseTree.qnaDefinition().qnaQuestion().questionText().getText().trim();
    }

    ExtractMoreQuestions(parseTree) {
        let questions = [];
        let errors = [];
        let questionsBody = parseTree.qnaDefinition().moreQuestionsBody();
        for (const errorQuestionStr of questionsBody.errorQuestionString()) {
            if (errorQuestionStr.getText().trim() !== '') {
                errors.push(BuildDiagnostic({
                    message: "Invalid QnA question line, did you miss '-' at line begin",
                    context: errorQuestionStr
                }));
            }
        }

        for (const question of questionsBody.moreQuestion()) {
            let questionText = question.getText().trim();
            questions.push(questionText.substr(1).trim());
        }

        return { questions, errors };
    }

    ExtractFilterPairs(parseTree) {
        let filterPairs = [];
        let errors = [];
        let filterSection = parseTree.qnaDefinition().qnaAnswerBody().filterSection();
        if (filterSection) {
            if (filterSection.errorFilterLine() !== undefined) {
                for (const errorFilterLineStr of filterSection.errorFilterLine()) {
                    if (errorFilterLineStr.getText().trim() !== '') {
                        errors.push(BuildDiagnostic({
                            message: "Invalid QnA filter line, did you miss '-' at line begin",
                            context: errorFilterLineStr
                        }));
                    }
                }
            }

            for (const filterLine of filterSection.filterLine()) {
                let filterLineText = filterLine.getText().trim();
                filterLineText = filterLineText.substr(1).trim();
                let filterPair = filterLineText.split('=');
                let key = filterPair[0].trim();
                let value = filterPair[1].trim();
                filterPairs.push({ key, value });
            }
        }

        return { filterPairs, errors };
    }

    ExtractAnswer(parseTree) {
        let multiLineAnswer = parseTree.qnaDefinition().qnaAnswerBody().multiLineAnswer().getText().trim();
        // Trim the opening and closing ``` lines to keep only the answer body.
        let answerRegexp = /^```(markdown)?\r*\n(?<answer>(.|\n|\r\n|\t| )*)\r?\n.*?```$/gim;
        let answer = answerRegexp.exec(multiLineAnswer);
        return answer && answer.groups.answer !== undefined ? answer.groups.answer : '';
    }
}

module.exports = QnaSection;

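// Sketch of the QnA block shape this section consumes, pieced together from
// the lexer rules in this commit (QNA, DASH, FILTER_MARK, MULTI_LINE_TEXT);
// the actual content below is made up:
//
//   # ? What is your name
//   - What should I call you
//
//   **Filters:**
//   - editorial = smart
//
//   ```markdown
//   You can call me Bot.
//   ```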
@ -0,0 +1,44 @@
const fs = require('fs');

const readTextFile = {
    readSync: function(file) {
        try {
            let fileBuffer = fs.readFileSync(file);
            if (fileBuffer) {
                // If the data starts with a BOM, we know it is UTF.
                if (fileBuffer[0] === 0xEF && fileBuffer[1] === 0xBB && fileBuffer[2] === 0xBF) {
                    // EF BB BF: UTF-8 with BOM
                    fileBuffer = fileBuffer.slice(3);
                } else if (fileBuffer[0] === 0xFF && fileBuffer[1] === 0xFE && fileBuffer[2] === 0x00 && fileBuffer[3] === 0x00) {
                    // FF FE 00 00: UTF-32, little-endian BOM
                    fileBuffer = fileBuffer.slice(4);
                } else if (fileBuffer[0] === 0x00 && fileBuffer[1] === 0x00 && fileBuffer[2] === 0xFE && fileBuffer[3] === 0xFF) {
                    // 00 00 FE FF: UTF-32, big-endian BOM
                    fileBuffer = fileBuffer.slice(4);
                } else if (fileBuffer[0] === 0xFE && fileBuffer[1] === 0xFF && fileBuffer[2] === 0x00 && fileBuffer[3] === 0x00) {
                    // FE FF 00 00: UCS-4, unusual octet order BOM (3412)
                    fileBuffer = fileBuffer.slice(4);
                } else if (fileBuffer[0] === 0x00 && fileBuffer[1] === 0x00 && fileBuffer[2] === 0xFF && fileBuffer[3] === 0xFE) {
                    // 00 00 FF FE: UCS-4, unusual octet order BOM (2143)
                    fileBuffer = fileBuffer.slice(4);
                } else if (fileBuffer[0] === 0xFF && fileBuffer[1] === 0xFE) {
                    // FF FE: UTF-16, little-endian BOM
                    fileBuffer = fileBuffer.slice(2);
                } else if (fileBuffer[0] === 0xFE && fileBuffer[1] === 0xFF) {
                    // FE FF: UTF-16, big-endian BOM
                    fileBuffer = fileBuffer.slice(2);
                }
            }

            // Note: the buffer is always decoded as UTF-8; for UTF-16/32 input this
            // only recovers ASCII-range characters, and stray NUL bytes are stripped.
            return fileBuffer.toString('utf8').replace(/\0/g, '');
        } catch (err) {
            if (err.message.match(/ENOENT: no such file or directory/)) {
                throw err;
            }

            // Error() takes a single message argument, so fold the details into it
            // (the original passed the details as a second, ignored argument).
            throw new Error(`Invalid Input: sorry, unable to parse file: \n\n ${JSON.stringify(err, null, 2)}\n\n`);
        }
    }
}

module.exports = readTextFile;

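// Usage sketch (not part of this commit): read a .lu file with BOM stripping.
// The path './app.lu' is a made-up example.
const readTextFile = require('./readTextFile');
const luContent = readTextFile.readSync('./app.lu');
console.log(luContent.length);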
@ -0,0 +1,272 @@
const luParser = require('./luParser');
const helpers = require('./../utils/helpers');
const NEWLINE = require('os').EOL;
const LUResource = require('./luResource');
const LUSectionTypes = require('../utils/enums/lusectiontypes');

class SectionOperator {

    /**
     * @param {LUResource} luresource
     */
    constructor(luresource) {
        this.Luresource = JSON.parse(JSON.stringify(luresource));
    }

    // After CRUD operations, section Ids stay the same unless the section name changes.
    addSection(sectionContent) {
        sectionContent = helpers.sanitizeNewLines(sectionContent);

        const newResource = luParser.parseWithRef(sectionContent, this.Luresource);
        if (!newResource) {
            return this.Luresource;
        }

        if (this.Luresource.Sections.some(u => u.Id === newResource.Id)) {
            throw new Error(`Section with id: ${newResource.Id} exists.`);
        }

        const offset = !this.Luresource.Content ? 0 : this.Luresource.Content.split(/\r?\n/).length;

        this.Luresource.Content = this.Luresource.Content !== '' ? `${this.Luresource.Content}${NEWLINE}${sectionContent}` : sectionContent;

        // adding a NestedIntentSection may produce multiple sections
        this.adjustRangeForAddSection(newResource.Sections, offset);
        this.Luresource.Sections.push(...newResource.Sections);

        this.adjustRangeForErrors(newResource.Errors, offset);
        this.Luresource.Errors.push(...newResource.Errors);

        luParser.extractSectionBody(this.Luresource.Sections, this.Luresource.Content);
        return this.Luresource;
    }

    updateSection(id, sectionContent) {
        sectionContent = helpers.sanitizeNewLines(sectionContent);
        const sectionIndex = this.Luresource.Sections.findIndex(u => u.Id === id);
        if (sectionIndex < 0) {
            return this.Luresource;
        }

        const oldSection = this.Luresource.Sections[sectionIndex];
        const newResource = luParser.parseWithRef(sectionContent, this.Luresource);
        if (!newResource) {
            return this.Luresource;
        }

        // adding a NestedIntentSection may produce multiple sections
        const startLine = oldSection.Range.Start.Line;
        const endLine = oldSection.Range.End.Line;

        this.removeErrors(this.Luresource.Errors, startLine, endLine);

        // adjust original errors
        const newLineRange = sectionContent.split(/\r?\n/).length;
        const originalRange = endLine - startLine + 1;
        this.adjustRangeForErrors(this.Luresource.Errors, newLineRange - originalRange, endLine);

        // adjust the updated sections' errors
        const offset = oldSection.Range.Start.Line - newResource.Sections[0].Range.Start.Line;
        this.adjustRangeForErrors(newResource.Errors, offset);
        this.Luresource.Errors.push(...newResource.Errors);

        this.Luresource.Content = this.replaceRangeContent(this.Luresource.Content, oldSection.Range.Start.Line - 1, oldSection.Range.End.Line - 1, sectionContent);
        this.adjustRangeForUpdateSection(sectionIndex, newResource.Sections);

        luParser.extractSectionBody(this.Luresource.Sections, this.Luresource.Content);
        return this.Luresource;
    }

    deleteSection(id) {
        const sectionIndex = this.Luresource.Sections.findIndex(u => u.Id === id);
        if (sectionIndex < 0) {
            return this;
        }

        const oldSection = this.Luresource.Sections[sectionIndex];
        const startLine = oldSection.Range.Start.Line;
        const endLine = oldSection.Range.End.Line;

        this.removeErrors(this.Luresource.Errors, startLine, endLine);
        this.adjustRangeForErrors(this.Luresource.Errors, startLine - endLine, endLine);

        this.Luresource.Sections.splice(sectionIndex, 1);
        this.Luresource.Content = this.replaceRangeContent(this.Luresource.Content, startLine - 1, endLine - 1, undefined);

        const offset = endLine - startLine + 1;
        this.adjustRangeForDeleteSection(sectionIndex, offset);
        luParser.extractSectionBody(this.Luresource.Sections, this.Luresource.Content);
        return this.Luresource;
    }

    insertSection(id, sectionContent) {
        // insert in front of the old section
        sectionContent = helpers.sanitizeNewLines(sectionContent);
        const sectionIndex = this.Luresource.Sections.findIndex(u => u.Id === id);

        if (sectionIndex < 0 && this.Luresource.Sections.length > 0) {
            return this.Luresource;
        }

        // if sectionIndex < 0 and the luresource is empty, just add it
        if (sectionIndex < 0 && this.Luresource.Sections.length === 0) {
            return this.addSection(sectionContent);
        }

        const newResource = luParser.parseWithRef(sectionContent, this.Luresource);
        if (!newResource) {
            return this.Luresource;
        }

        // adding a NestedIntentSection may produce multiple sections

        // adjust original errors
        const newLineRange = sectionContent.split(/\r?\n/).length;
        const startLine = sectionIndex <= 0 ? 1 : this.Luresource.Sections[sectionIndex].Range.Start.Line;
        this.adjustRangeForErrors(this.Luresource.Errors, newLineRange, startLine);

        // adjust the errors of the inserted section
        this.adjustRangeForErrors(newResource.Errors, startLine - 1);
        this.Luresource.Errors.push(...newResource.Errors);

        this.Luresource.Content = this.replaceRangeContent(this.Luresource.Content, startLine - 1, startLine - 2, sectionContent);
        this.adjustRangeForInsertSection(sectionIndex, newResource.Sections);

        luParser.extractSectionBody(this.Luresource.Sections, this.Luresource.Content);
        return this.Luresource;
    }

    removeErrors(errors, startLine, endLine) {
        if (errors && startLine >= 0 && endLine >= startLine) {
            let index = -1;

            while ((index = errors.findIndex(u =>
                u.Range && ((u.Range.Start.Line >= startLine && u.Range.Start.Line <= endLine)
                || (u.Range.End.Line >= startLine && u.Range.End.Line <= endLine)))) >= 0) {
                this.Luresource.Errors.splice(index, 1);
            }
        }
    }

    adjustRangeForErrors(errors, offset, startLine, endLine) {
        if (errors) {
            if (startLine === undefined && endLine === undefined) {
                errors.forEach(u => {
                    this.adjustErrorRange(u, offset);
                });
            } else if (startLine >= 0 && (endLine === undefined || endLine < startLine)) {
                errors.forEach(u => {
                    if (u.Range.Start.Line >= startLine) {
                        this.adjustErrorRange(u, offset);
                    }
                });
            } else if (startLine >= 0 && endLine >= startLine) {
                errors.forEach(u => {
                    if (u.Range.Start.Line >= startLine && u.Range.End.Line <= endLine) {
                        this.adjustErrorRange(u, offset);
                    }
                });
            }
        }
    }

    adjustErrorRange(error, offset) {
        if (error && error.Range) {
            error.Range.Start.Line += offset;
            error.Range.End.Line += offset;
        }
    }

    adjustRangeForAddSection(newSections, offset) {
        newSections.forEach(u => {
            this.adjustSectionRange(u, offset);
        });
    }

    adjustSectionRange(section, offset) {
        if (section) {
            if (section.SectionType === LUSectionTypes.NESTEDINTENTSECTION && section.SimpleIntentSections) {
                section.SimpleIntentSections.forEach(k => {
                    k.Range.Start.Line += offset;
                    k.Range.End.Line += offset;
                });
            }

            section.Range.Start.Line += offset;
            section.Range.End.Line += offset;
        }
    }

    adjustRangeForDeleteSection(index, offset) {
        for (let i = index; i < this.Luresource.Sections.length; i++) {
            const section = this.Luresource.Sections[i];
            this.adjustSectionRange(section, -offset);
        }
    }

    adjustRangeForUpdateSection(oldIndex, newSections) {
        const sectionsSize = newSections.length;
        const oldStartLine = this.Luresource.Sections[oldIndex].Range.Start.Line;
        const oldEndLine = this.Luresource.Sections[oldIndex].Range.End.Line;
        const newStartLine = newSections[0].Range.Start.Line;
        const newEndLine = newSections[newSections.length - 1].Range.End.Line;

        this.Luresource.Sections.splice(oldIndex, 1, ...newSections);

        // adjust the updated sections' range
        const updateOffset = oldStartLine - this.Luresource.Sections[oldIndex].Range.Start.Line;
        for (let i = oldIndex; i < oldIndex + sectionsSize; i++) {
            const section = this.Luresource.Sections[i];
            this.adjustSectionRange(section, updateOffset);
        }

        // adjust the remaining sections' range
        const remainingOffset = (newEndLine - newStartLine) - (oldEndLine - oldStartLine);
        for (let i = oldIndex + sectionsSize; i < this.Luresource.Sections.length; i++) {
            const section = this.Luresource.Sections[i];
            this.adjustSectionRange(section, remainingOffset);
        }
    }

    adjustRangeForInsertSection(postIndex, newSections) {
        const sectionsSize = newSections.length;
        const insertOffset = postIndex <= 0 ? 0 : this.Luresource.Sections[postIndex].Range.Start.Line - 1;
        const newStartLine = newSections[0].Range.Start.Line;
        const newEndLine = newSections[newSections.length - 1].Range.End.Line;

        this.Luresource.Sections.splice(postIndex, 0, ...newSections);

        // adjust the inserted sections' range
        for (let i = postIndex; i < postIndex + sectionsSize; i++) {
            const section = this.Luresource.Sections[i];
            this.adjustSectionRange(section, insertOffset);
        }

        // adjust the remaining sections' range
        const remainingOffset = newEndLine - newStartLine + 1;
        for (let i = postIndex + sectionsSize; i < this.Luresource.Sections.length; i++) {
            const section = this.Luresource.Sections[i];
            this.adjustSectionRange(section, remainingOffset);
        }
    }

    replaceRangeContent(originString, startLine, stopLine, replaceString) {
        const originList = originString.split(/\r?\n/);
        let destList = [];
        if (isNaN(startLine) || isNaN(stopLine) || startLine < 0 || startLine > stopLine + 1) {
            throw new Error("index out of range.");
        }

        destList.push(...originList.slice(0, startLine));

        if (replaceString) {
            destList.push(replaceString);
        }

        destList.push(...originList.slice(stopLine + 1));

        return destList.join(NEWLINE);
    }
}

module.exports = SectionOperator;
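A hedged usage sketch for the operator above (illustrative, not part of the commit). The `luParser.parse` entry point and the literal section-id value are assumptions; the id format `${SectionType}_${Name}` comes from the section classes in this commit:

const luParser = require('./luParser');       // assumed entry point
const SectionOperator = require('./sectionOperator');
const NEWLINE = require('os').EOL;

// The constructor deep-copies the resource, so the original stays untouched.
const resource = luParser.parse(`# Greeting${NEWLINE}- hello`);
const operator = new SectionOperator(resource);

const added = operator.addSection(`# Help${NEWLINE}- can you help me`);
// Ids follow `${SectionType}_${Name}`; the exact SectionType string is an
// assumption here, taken from LUSectionTypes.SIMPLEINTENTSECTION.
const help = added.Sections.find(s => s.Id.endsWith('_Help'));
const updated = operator.updateSection(help.Id, `# Help${NEWLINE}- help me please`);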
@ -0,0 +1,100 @@
const SimpleIntentSectionContext = require('./generated/LUFileParser').LUFileParser.SimpleIntentSectionContext;
const EntitySection = require('./entitySection');
const NewEntitySection = require('./newEntitySection');
const visitor = require('./visitor');
const DiagnosticSeverity = require('./diagnostic').DiagnosticSeverity;
const BuildDiagnostic = require('./diagnostic').BuildDiagnostic;
const LUSectionTypes = require('./../utils/enums/lusectiontypes');
const NEWLINE = require('os').EOL;
const BaseSection = require('./baseSection');
const Range = require('./diagnostic').Range;
const Position = require('./diagnostic').Position;

class SimpleIntentSection extends BaseSection {
    /**
     *
     * @param {SimpleIntentSectionContext} parseTree
     */
    constructor(parseTree, content) {
        super();
        this.SectionType = LUSectionTypes.SIMPLEINTENTSECTION;
        this.UtteranceAndEntitiesMap = [];
        this.Entities = [];
        this.Errors = [];
        this.Body = '';

        if (parseTree) {
            this.Name = this.ExtractName(parseTree);
            this.IntentNameLine = this.ExtractIntentNameLine(parseTree);
            let result = this.ExtractUtteranceAndEntitiesMap(parseTree);
            this.UtteranceAndEntitiesMap = result.utteranceAndEntitiesMap;
            this.Errors = result.errors;
            this.Id = `${this.SectionType}_${this.Name}`;
            const startPosition = new Position(parseTree.start.line, parseTree.start.column);
            const stopPosition = new Position(parseTree.stop.line, parseTree.stop.column + parseTree.stop.text.length);
            this.Range = new Range(startPosition, stopPosition);
        }
    }

    ExtractName(parseTree) {
        return parseTree.intentDefinition().intentNameLine().intentName().getText().trim();
    }

    ExtractIntentNameLine(parseTree) {
        return parseTree.intentDefinition().intentNameLine().getText().trim();
    }

    ExtractUtteranceAndEntitiesMap(parseTree) {
        let utteranceAndEntitiesMap = [];
        let errors = [];
        if (parseTree.intentDefinition().intentBody() && parseTree.intentDefinition().intentBody().normalIntentBody()) {
            for (const errorIntentStr of parseTree.intentDefinition().intentBody().normalIntentBody().errorString()) {
                if (errorIntentStr.getText().trim() !== '') {
                    errors.push(BuildDiagnostic({
                        message: "Invalid intent body line, did you miss a '-' at the beginning of the line?",
                        context: errorIntentStr
                    }));
                }
            }

            for (const normalIntentStr of parseTree.intentDefinition().intentBody().normalIntentBody().normalIntentString()) {
                let utteranceAndEntities;
                try {
                    utteranceAndEntities = visitor.visitNormalIntentStringContext(normalIntentStr);
                }
                catch (err) {
                    errors.push(BuildDiagnostic({
                        message: "Invalid utterance definition found. Did you miss a '{' or '}'?",
                        context: normalIntentStr
                    }));
                }
                if (utteranceAndEntities !== undefined) {
                    utteranceAndEntities.contextText = normalIntentStr.getText();
                    const startPosition = new Position(normalIntentStr.start.line, normalIntentStr.start.column);
                    const stopPosition = new Position(normalIntentStr.stop.line, normalIntentStr.stop.column + normalIntentStr.stop.text.length);
                    utteranceAndEntities.range = new Range(startPosition, stopPosition);

                    utteranceAndEntitiesMap.push(utteranceAndEntities);
                    utteranceAndEntities.errorMsgs.forEach(errorMsg => errors.push(BuildDiagnostic({
                        message: errorMsg,
                        context: normalIntentStr
                    })));
                }
            }
        }

        if (utteranceAndEntitiesMap.length === 0) {
            let errorMsg = `no utterances found for intent definition: "# ${this.Name}"`;
            let error = BuildDiagnostic({
                message: errorMsg,
                context: parseTree.intentDefinition().intentNameLine(),
                severity: DiagnosticSeverity.WARN
            });

            errors.push(error);
        }

        return { utteranceAndEntitiesMap, errors };
    }
}

module.exports = SimpleIntentSection;
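A sketch of how a parsed section might be consumed (illustrative; `luParser.parse` and the position of the intent within `Sections` are assumptions). Each `UtteranceAndEntitiesMap` entry carries the `utterance`, `entities`, `errorMsgs`, `contextText`, and `range` produced above:

const luParser = require('./luParser');  // assumed entry point

const resource = luParser.parse('# BookFlight\n- book a flight to {city=paris}');
// Assumption: the intent is the first section of this tiny file.
const intent = resource.Sections[0];

console.log(intent.Name);  // 'BookFlight'
intent.UtteranceAndEntitiesMap.forEach(u => {
    console.log(u.utterance, u.entities.map(e => e.entity));
});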
@ -0,0 +1,383 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const fetch = require('node-fetch');
const PARSERCONSTS = require('./../utils/enums/parserconsts');
const retCode = require('./../utils/enums/CLI-errors');
const chalk = require('chalk');
const exception = require('./../utils/exception');
const helpers = require('./../utils/helpers');
const NEWLINE = require('os').EOL;
const MAX_TRANSLATE_BATCH_SIZE = 25;
const MAX_CHAR_IN_REQUEST = 4990;

const translateHelpers = {
    /**
     * Helper function to parse and translate .lu file content
     * @param {string} fileContent file content
     * @param {string} subscriptionKey translate text API key
     * @param {string} to_lang language code to translate content to
     * @param {string} src_lang language code for source content
     * @param {boolean} translate_comments translate comments in .lu files if this is set to true
     * @param {boolean} translate_link_text translate URL or LU reference link text in .lu files if this is set to true
     * @param {boolean} log indicates if this function should write verbose messages to process.stdout
     * @param {number} batch_translate indicates the number of input lines to batch up before calling the translation API
     * @returns {string} Localized file content
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    parseAndTranslate: async function(fileContent, subscriptionKey, to_lang, src_lang, translate_comments, translate_link_text, log, batch_translate) {
        let batch_translate_size = batch_translate ? parseInt(batch_translate) : MAX_TRANSLATE_BATCH_SIZE;
        fileContent = helpers.sanitizeNewLines(fileContent);
        let linesInFile = fileContent.split(NEWLINE);
        let linesToTranslate = [];
        let localizedContent = '';
        let currentSectionType = '';
        let inAnswer = false;
        let lineCtr = 0;
        for (let lineIndex in linesInFile) {
            lineCtr++;
            let currentLine = linesInFile[lineIndex].trim();
            // is the current line a comment?
            if (currentLine.indexOf(PARSERCONSTS.COMMENT) === 0) {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                if (translate_comments) {
                    addSegment(linesToTranslate, currentLine.charAt(0), false);
                    addSegment(linesToTranslate, currentLine.substring(1), true);
                } else {
                    addSegment(linesToTranslate, currentLine, false);
                }
            } else if (currentLine.indexOf(PARSERCONSTS.FILTER) === 0) {
                addSegment(linesToTranslate, currentLine, false);
                currentSectionType = PARSERCONSTS.FILTER;
            } else if (currentLine.indexOf(PARSERCONSTS.INTENT) === 0) {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                let intentName = currentLine.substring(currentLine.indexOf(' ') + 1).trim();
                // is this a QnA?
                if (intentName.indexOf(PARSERCONSTS.QNA) === 0) {
                    let beforeQuestion = currentLine.substring(0, currentLine.indexOf(' ') + 1);
                    let question = intentName.slice(1).trim();
                    addSegment(linesToTranslate, beforeQuestion + '? ', false);
                    addSegment(linesToTranslate, question, true);
                    currentSectionType = PARSERCONSTS.QNA;
                } else {
                    // do not localize the intent name, but remember we are under an intent section
                    currentSectionType = PARSERCONSTS.INTENT;
                    addSegment(linesToTranslate, currentLine, false);
                }
            } else if (currentLine.indexOf('-') === 0 ||
                currentLine.indexOf('*') === 0 ||
                currentLine.indexOf('+') === 0) {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                // Fix for #1191. Do not localize metadata filters for QnA.
                if (currentSectionType === PARSERCONSTS.FILTER) {
                    addSegment(linesToTranslate, currentLine, false);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                let listSeparator = '';
                let content = '';
                switch (currentSectionType) {
                case PARSERCONSTS.INTENT:
                    listSeparator = currentLine.charAt(0);
                    addSegment(linesToTranslate, listSeparator + ' ', false);
                    content = currentLine.slice(1).trim();
                    let skipChars = ['{', '}', '(', ')', '[', ']', '|', '='];
                    for (let i = 0; i < content.length; i++) {
                        let processedText = '';
                        let tslt = false;
                        if (!skipChars.includes(content.charAt(i))) {
                            // copy text up to the next special character and mark it for translation
                            for (let j = i; j < content.length && !skipChars.includes(content.charAt(j)); j++) {
                                processedText += content.charAt(j);
                            }
                            tslt = true;
                        } else if (content.charAt(i) == '{') {
                            // copy an entity reference verbatim up to '=' or '}'
                            for (let j = i; j < content.length && (content.charAt(j) !== '=' && content.charAt(j) !== '}'); j++) {
                                processedText += content.charAt(j);
                            }
                        } else {
                            processedText += content.charAt(i);
                        }

                        if (processedText.charAt(0) === ' ') {
                            addSegment(linesToTranslate, ' ', false);
                        }

                        addSegment(linesToTranslate, processedText, tslt);
                        content = content.slice(processedText.length);
                        i--;
                    }
                    break;
                case PARSERCONSTS.NEWENTITY:
                    // strip the line of the list separator
                    listSeparator = currentLine.charAt(0);
                    content = currentLine.slice(1).trim();
                    // if the current line is a normalized value, add it to the list to localize
                    if (content.trim().endsWith(':')) {
                        let normalizedValueAsSynonym = content.replace(/:$/g, '').trim();
                        addSegment(linesToTranslate, `\t- ${normalizedValueAsSynonym}:`, false);
                        addSegment(linesToTranslate, NEWLINE, false);
                        addSegment(linesToTranslate, '\t\t- ', false);
                        addSegment(linesToTranslate, normalizedValueAsSynonym, true);
                    } else {
                        addSegment(linesToTranslate, '\t\t- ', false);
                        addSegment(linesToTranslate, content, true);
                    }
                    break;
                case PARSERCONSTS.ENTITY:
                case PARSERCONSTS.QNA:
                default:
                    // strip the line of the list separator
                    listSeparator = currentLine.charAt(0);
                    content = currentLine.slice(1).trim();
                    addSegment(linesToTranslate, listSeparator + ' ', false);
                    addSegment(linesToTranslate, content, true);
                    break;
                }
            } else if (currentLine.indexOf(PARSERCONSTS.ENTITY) === 0) {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                // we need to localize QnA alterations if specified.
                let entityDef = currentLine.replace(PARSERCONSTS.ENTITY, '').split(':');
                let entityName = entityDef[0];
                let entityType = entityDef[1];
                if (entityType.includes(PARSERCONSTS.QNAALTERATIONS)) {
                    addSegment(linesToTranslate, '$', false);
                    addSegment(linesToTranslate, entityName.trim(), true);
                    addSegment(linesToTranslate, ' : ' + PARSERCONSTS.QNAALTERATIONS + ' = ', false);
                } else {
                    // do not localize the entity line, but remember we are under an entity section for list entities
                    // FIX for BF CLI #121
                    // If this is a list entity, add the normalized value to the list of synonyms to translate.
                    addSegment(linesToTranslate, currentLine, false);
                    if (entityType.trim().endsWith('=')) {
                        addSegment(linesToTranslate, NEWLINE, false);
                        let normalizedValueAsSynonym = entityType.replace('=', '').trim();
                        addSegment(linesToTranslate, '- ', false);
                        addSegment(linesToTranslate, normalizedValueAsSynonym, true);
                    }
                }
            } else if (currentLine.indexOf(PARSERCONSTS.ANSWER) === 0) {
                addSegment(linesToTranslate, currentLine, false);
                inAnswer = !inAnswer;
                currentSectionType = PARSERCONSTS.ANSWER;
            } else if (currentLine.indexOf(PARSERCONSTS.URLORFILEREF) === 0) {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
                currentSectionType = PARSERCONSTS.URLORFILEREF;
                if (translate_link_text) {
                    const linkValueRegEx = new RegExp(/\(.*?\)/g);
                    let linkValueList = currentLine.trim().match(linkValueRegEx);
                    let linkValue = linkValueList[0].replace('(', '').replace(')', '');
                    const linkTextRegEx = new RegExp(/\[.*\]/g);
                    let linkTextList = currentLine.trim().match(linkTextRegEx);
                    let linkTextValue = linkTextList[0].replace('[', '').replace(']', '');
                    addSegment(linesToTranslate, '[', false);
                    addSegment(linesToTranslate, linkTextValue, true);
                    addSegment(linesToTranslate, ']', false);
                    addSegment(linesToTranslate, '(' + linkValue + ')', false);
                } else {
                    addSegment(linesToTranslate, currentLine, false);
                }
            } else if (currentLine === '') {
                if (inAnswer) {
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                }
            } else if (currentLine.indexOf(PARSERCONSTS.NEWENTITY) === 0) {
                // Nothing in the entity line should be localized.
                addSegment(linesToTranslate, currentLine, false);
                currentSectionType = PARSERCONSTS.NEWENTITY;
            } else {
                if (inAnswer) {
                    addSegment(linesToTranslate, currentLine, true);
                    addSegment(linesToTranslate, NEWLINE, false);
                    continue;
                } else {
                    throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, 'Error: Unexpected line encountered when parsing \n' + '[' + lineIndex + ']:' + currentLine));
                }
            }
            addSegment(linesToTranslate, NEWLINE, false);
            // do we have any payload to localize, and have we hit the batch size limit?
            if ((linesToTranslate.length !== 0) && (lineCtr % batch_translate_size === 0)) {
                try {
                    localizedContent += await batchTranslateText(linesToTranslate, subscriptionKey, to_lang, src_lang, log);
                    linesToTranslate = [];
                } catch (err) {
                    throw (err);
                }
            }
        }
        if (linesToTranslate.length !== 0) {
            try {
                localizedContent += await batchTranslateText(linesToTranslate, subscriptionKey, to_lang, src_lang, log);
                linesToTranslate = [];
            } catch (err) {
                throw (err);
            }
        }
        return localizedContent;
    },

    /**
     * Helper function to call the MT REST API to translate content
     * @param {string} text Text to translate
     * @param {string} subscriptionKey user-provided subscription key to the text translation API
     * @param {string} to_lang target language to localize to
     * @param {string} from_lang source language of text
     * @returns {object} response from the MT call.
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    translateText: async function(text, subscriptionKey, to_lang, from_lang) {
        let payload = Array.isArray(text) ? text : [{'Text': text}];
        let tUri = 'https://api.cognitive.microsofttranslator.com/translate?api-version=3.0&to=' + to_lang + '&includeAlignment=true';
        if (from_lang) tUri += '&from=' + from_lang;
        const options = {
            method: 'POST',
            body: JSON.stringify(payload),
            headers: {
                'Content-Type': 'application/json',
                'Ocp-Apim-Subscription-Key': subscriptionKey,
                'X-ClientTraceId': get_guid(),
            }
        };
        const res = await fetch(tUri, options);
        if (!res.ok) {
            throw (new exception(retCode.errorCode.TRANSLATE_SERVICE_FAIL, 'Text translator service call failed with [' + res.status + '] : ' + res.statusText + '.\nPlease check key & language code validity'));
        }
        let data = await res.json();
        return data;
    }
};

/**
 * Helper function to break down the input string if it is longer than MAX_CHAR_IN_REQUEST for the translate API
 * @param {translateLine []} linesToTranslate Array of translateLine objects
 * @param {string} text text to translate
 * @param {boolean} localize indicates if the request should be localized or not.
 * @returns {void}
 */
const addSegment = function(linesToTranslate, text, localize) {
    if (text.length >= MAX_CHAR_IN_REQUEST) {
        // break it up into smaller segments and add them to the batchRequest payload
        let splitRegExp = new RegExp(`(.{${MAX_CHAR_IN_REQUEST}})`);
        let splitLine = text.split(splitRegExp).filter(O => O);
        splitLine.forEach(item => {
            linesToTranslate.push(new translateLine(item, localize));
        });
    } else {
        linesToTranslate.push(new translateLine(text, localize));
    }
};

/**
 * Helper function to batch calls to the translate API
 * @param {translateLine []} linesToTranslate Array of translateLine objects
 * @param {string} subscriptionKey translate text API key
 * @param {string} to_lang language code to translate content to
 * @param {string} src_lang language code for source content
 * @param {boolean} log indicates if this function should write verbose messages to process.stdout
 * @returns {string} translated content
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const batchTranslateText = async function(linesToTranslate, subscriptionKey, to_lang, src_lang, log) {
    // responsible for breaking localizable text into chunks that are
    // - not more than 5000 characters in combined length
    // - not more than 25 segments in one chunk
    let retValue = '';
    if (!Array.isArray(linesToTranslate) || linesToTranslate.length === 0) return retValue;
    let charCountInChunk = 0;
    let batchTranslate = [];
    for (var idx in linesToTranslate) {
        let item = linesToTranslate[idx];
        if (item.text.length + charCountInChunk >= MAX_CHAR_IN_REQUEST) {
            await translateAndMap(batchTranslate, subscriptionKey, to_lang, src_lang, linesToTranslate);
            batchTranslate = [];
            charCountInChunk = 0;
        }
        let currentBatchSize = batchTranslate.length > 0 ? batchTranslate.length : 1;
        if (currentBatchSize % MAX_TRANSLATE_BATCH_SIZE === 0) {
            await translateAndMap(batchTranslate, subscriptionKey, to_lang, src_lang, linesToTranslate);
            batchTranslate = [];
            charCountInChunk = 0;
        }
        if (item.localize) {
            item.idx = batchTranslate.length;
            batchTranslate.push({'Text': item.text});
            charCountInChunk += item.text.length;
        }
    }
    if (batchTranslate.length !== 0) {
        await translateAndMap(batchTranslate, subscriptionKey, to_lang, src_lang, linesToTranslate);
        batchTranslate = [];
        charCountInChunk = 0;
    }
    linesToTranslate.forEach(item => retValue += item.text);
    if (log) process.stdout.write(chalk.default.gray(retValue));
    return retValue;
};

/**
 * Helper function to call translate and update the text with the localized result
 * @param {object []} batchRequest Array of {'Text':'value'} objects
 * @param {string} subscriptionKey translate text API key
 * @param {string} to_lang language code to translate content to
 * @param {string} src_lang language code for source content
 * @param {translateLine []} linesToTranslateCopy Array of translateLine objects
 * @returns {void}
 */
const translateAndMap = async function(batchRequest, subscriptionKey, to_lang, src_lang, linesToTranslateCopy) {
    if (batchRequest.length === 0) return;
    let data;
    data = await translateHelpers.translateText(batchRequest, subscriptionKey, to_lang, src_lang);
    data.forEach((item, idx) => {
        // find the corresponding item in linesToTranslate
        let itemInLine = linesToTranslateCopy.find(item => item.idx === idx);
        if (itemInLine) {
            itemInLine.text = item.translations[0].text;
            itemInLine.idx = -1;
        }
    });
};

/**
 * Helper function to create a random GUID
 * @returns {string} GUID
 */
const get_guid = function() {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
        var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
}

class translateLine {
    constructor(text, localize, idx) {
        this.text = text ? text : '';
        this.localize = localize ? localize : false;
        this.idx = idx ? idx : -1;
    }
}

module.exports = translateHelpers;
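A hedged invocation sketch for `parseAndTranslate` (illustrative; the module path is an assumption and `<your-translator-key>` is a placeholder supplied by the surrounding CLI in practice):

const translateHelpers = require('./translate');  // assumed module path

(async () => {
    const lu = '# Greeting\n- hello\n- good morning';
    // Batch size falls back to MAX_TRANSLATE_BATCH_SIZE (25) when the
    // last argument is undefined.
    const localized = await translateHelpers.parseAndTranslate(
        lu, '<your-translator-key>', 'de', 'en',
        false /* translate_comments */, false /* translate_link_text */,
        false /* log */, undefined /* batch_translate */);
    console.log(localized);
})();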
@ -0,0 +1,149 @@
const lp = require('./generated/LUFileParser').LUFileParser;
const LUISObjNameEnum = require('./../utils/enums/luisobjenum');
const InvalidCharsInIntentOrEntityName = require('./../utils/enums/invalidchars').InvalidCharsInIntentOrEntityName;

class Visitor {
    /**
     * @param {lp.NormalIntentStringContext} ctx
     * @returns {object}
     */
    static visitNormalIntentStringContext(ctx) {
        let utterance = '';
        let entities = [];
        let errorMsgs = [];
        for (const node of ctx.children) {
            const innerNode = node;
            switch (innerNode.symbol.type) {
                case lp.DASH: break;
                case lp.EXPRESSION: {
                    let tokUtt = this.tokenizeUtterance(innerNode.getText().trim());
                    utterance = this.recurselyResolveTokenizedUtterance(tokUtt, entities, errorMsgs, utterance.trimLeft());
                    break;
                }
                default: {
                    utterance = utterance.concat(innerNode.getText());
                    break;
                }
            }
        }

        return { utterance: utterance.trim(), entities, errorMsgs };
    }

    /**
     *
     * @param {Object[]} tokUtt
     * @param {Object[]} entities
     * @param {Object[]} errorMsgs
     * @param {String} srcUtterance
     */
    static recurselyResolveTokenizedUtterance(tokUtt, entities, errorMsgs, srcUtterance) {
        for (const item of tokUtt) {
            if (item === Object(item)) {
                let entityName = item.entityName.trim();
                if (entityName && InvalidCharsInIntentOrEntityName.some(x => entityName.includes(x))) {
                    errorMsgs.push(`Invalid utterance line, entity name ${entityName} cannot contain any of the following characters: [<, >, *, %, &, :, \\, $]`);
                    continue;
                }

                if (item.entityValue === undefined) {
                    // we have a pattern.any entity
                    const patternStr = item.role ? `{${item.entityName}:${item.role}}` : `{${item.entityName}}`;
                    srcUtterance += patternStr;
                    entities.push({
                        type: LUISObjNameEnum.PATTERNANYENTITY,
                        entity: item.entityName.trim(),
                        role: item.role.trim()
                    });
                } else {
                    // we have a new entity
                    let newEntity = {
                        type: LUISObjNameEnum.ENTITIES,
                        entity: item.entityName.trim(),
                        role: item.role.trim(),
                        startPos: srcUtterance.length,
                        endPos: undefined
                    };
                    if (item.entityValue === undefined) {
                        // defensive branch: the outer check guarantees entityValue is defined here
                        errorMsgs.push(`Composite entity "${item.parent.entityName}" includes pattern.any entity "${item.entityName}".\r\n\tComposites cannot include pattern.any entity as a child.`);
                    } else {
                        srcUtterance = this.recurselyResolveTokenizedUtterance(item.entityValue, entities, errorMsgs, srcUtterance).trimLeft();
                        newEntity.endPos = srcUtterance.length - 1;
                        entities.push(newEntity);
                    }
                }
            } else {
                srcUtterance += item;
            }
        }
        return srcUtterance;
    }

    /**
     * @param {string} exp
     * @returns {object}
     */
    static tokenizeUtterance(exp) {
        let splitString = [];
        let curList = splitString;
        let curEntity = undefined;
        let entityNameCapture = false;
        let entityValueCapture = false;
        let entityRoleCapture = false;
        exp.split('').forEach(char => {
            switch (char) {
                case '{': {
                    let newEntity = { entityName: '', role: '', entityValue: undefined, parent: curEntity };
                    curList.push(newEntity);
                    curEntity = newEntity;
                    entityNameCapture = true;
                    entityRoleCapture = false;
                    entityValueCapture = false;
                    break;
                }
                case '}':
                    curEntity = curEntity.parent || undefined;
                    curList = curEntity != undefined ? curEntity.entityValue : splitString;
                    entityValueCapture = false;
                    entityRoleCapture = false;
                    entityNameCapture = false;
                    break;
                case '=':
                    curEntity.entityValue = [];
                    curList = curEntity.entityValue;
                    entityNameCapture = false;
                    entityValueCapture = true;
                    entityRoleCapture = false;
                    break;
                case ':':
                    if (curEntity !== undefined && curEntity.entityName !== '' && entityNameCapture === true) {
                        entityRoleCapture = true;
                        entityNameCapture = false;
                        entityValueCapture = false;
                    } else {
                        curList.push(char);
                    }
                    break;
                default:
                    if (entityNameCapture) {
                        curEntity.entityName += char;
                    } else if (entityValueCapture) {
                        if (char === ' ') {
                            // we do not want leading spaces
                            if (curList.length !== 0) {
                                curList.push(char);
                            }
                        } else {
                            curList.push(char);
                        }
                    } else if (entityRoleCapture) {
                        curEntity.role += char;
                    } else {
                        curList.push(char);
                    }
                    break;
            }
        });
        return splitString;
    }
}

module.exports = Visitor;
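To illustrate the tokenizer above: plain characters pass through one by one, while a `{name=value}` label becomes a nested entity object. A small sketch of the expected shape, traced from the switch logic:

const Visitor = require('./visitor');

const tokens = Visitor.tokenizeUtterance('book {city=paris} now');
// '{city=paris}' yields an object roughly like:
//   { entityName: 'city', role: '', entityValue: ['p','a','r','i','s'], parent: undefined }
const entity = tokens.find(t => t === Object(t));
console.log(entity.entityName, entity.entityValue.join(''));  // city paris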
@ -0,0 +1,520 @@
|
|||
const NEWLINE = require('os').EOL;
|
||||
const helperClasses = require('./../lufile/classes/hclasses')
|
||||
const EntityTypeEnum = require('./../utils/enums/luisEntityTypes');
|
||||
|
||||
/**
|
||||
* Parses a Luis object into Lu Content
|
||||
* @param {Luis} luisJSON
|
||||
* @returns {string} Lu Content
|
||||
* @throws {exception} Throws on errors. exception object includes errCode and text.
|
||||
*/
|
||||
const luisToLuContent = function(luisJSON){
|
||||
let fileContent = '';
|
||||
let luisObj = new helperClasses.rLuisObj();
|
||||
(luisJSON.intents || []).forEach(function(intent) {
|
||||
luisObj.intents.push(new helperClasses.intent(intent, []));
|
||||
});
|
||||
// add utterances to luisObj
|
||||
updateUtterancesList(luisJSON.utterances, luisObj.intents, 'text');
|
||||
// add patterns to luisObj
|
||||
updateUtterancesList(luisJSON.patterns, luisObj.intents, 'pattern');
|
||||
|
||||
// Parse Intents
|
||||
fileContent += parseIntentsToLu(luisObj, luisJSON)
|
||||
fileContent += parseEntitiesToLu(luisJSON)
|
||||
fileContent += parseToLuPrebuiltEntities(luisJSON)
|
||||
fileContent += handlePhraseLists(luisJSON.model_features);
|
||||
fileContent += handlePhraseLists(luisJSON.phraselists);
|
||||
fileContent += parseToLuClosedLists(luisJSON)
|
||||
fileContent += parseRegExEntitiesToLu(luisJSON.regex_entities)
|
||||
// handle regexEntities in json returned from luis export api
|
||||
// similar with regex_entities
|
||||
fileContent += parseRegExEntitiesToLu(luisJSON.regexEntities)
|
||||
fileContent += parseCompositesToLu(luisJSON)
|
||||
fileContent += parsePatternAnyEntitiesToLu(luisJSON)
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseIntentsToLu = function(luisObj, luisJSON){
|
||||
let fileContent = ''
|
||||
fileContent += NEWLINE;
|
||||
fileContent += addAppMetaData(luisJSON);
|
||||
// Add test result if in test mode
|
||||
if (luisJSON.test === true) {
|
||||
fileContent += `> Total passed: ${luisJSON.passNumber}/${luisJSON.count}` + NEWLINE + NEWLINE
|
||||
}
|
||||
fileContent += '> # Intent definitions' + NEWLINE + NEWLINE;
|
||||
|
||||
if(luisObj.intents.length <= 0) {
|
||||
return fileContent
|
||||
}
|
||||
// write out intents and utterances..
|
||||
luisObj.intents.forEach(function(intent) {
|
||||
// Add inherits information if any
|
||||
if (intent.intent.inherits !== undefined) {
|
||||
// > !# @intent.inherits = {name = Web.WebSearch; domain_name = Web; model_name = WebSearch}
|
||||
fileContent += '> !# @intent.inherits = name : ' + intent.intent.name;
|
||||
if (intent.intent.inherits.domain_name !== undefined) {
|
||||
fileContent += '; domain_name : ' + intent.intent.inherits.domain_name;
|
||||
}
|
||||
if (intent.intent.inherits.model_name !== undefined) {
|
||||
fileContent += '; model_name : ' + intent.intent.inherits.model_name;
|
||||
}
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
}
|
||||
// Add test result if in test mode
|
||||
if (luisJSON.test === true) {
|
||||
fileContent += `> Utterance passed in this intent: ${intent.intent.passNumber}/${intent.intent.count}` + NEWLINE
|
||||
}
|
||||
fileContent += '## ' + intent.intent.name + NEWLINE;
|
||||
fileContent += parseUtterancesToLu(intent.utterances, luisJSON)
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
if (intent.intent.features) {
|
||||
let rolesAndFeatures = addRolesAndFeatures(intent.intent);
|
||||
if (rolesAndFeatures !== '') {
|
||||
fileContent += `@ intent ${intent.intent.name.includes(' ') ? `"${intent.intent.name}"` : `${intent.intent.name}`}`;
|
||||
fileContent += rolesAndFeatures;
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
}
|
||||
}
|
||||
});
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseUtterancesToLu = function(utterances, luisJSON){
|
||||
let fileContent = ''
|
||||
utterances.forEach(function(utterance) {
|
||||
let updatedText = utterance.text;
|
||||
// Add test result if in test mode and utterance has test result
|
||||
if(luisJSON.test === true && utterance.predictedResult !== undefined){
|
||||
fileContent += parsePredictedResultToLu(utterance, luisJSON)
|
||||
}
|
||||
if(utterance.entities.length >= 0) {
|
||||
// update utterance for each entity
|
||||
let text = utterance.text;
|
||||
// flatten entities
|
||||
let flatEntities = [];
|
||||
Object.assign([], utterance.entities).forEach(entity => flattenEntities(entity, flatEntities));
|
||||
let sortedEntitiesList = objectSortByStartPos(flatEntities);
|
||||
// remove all children
|
||||
sortedEntitiesList.forEach(entity => delete entity.children);
|
||||
let tokenizedText = text.split('');
|
||||
// handle cases where we have both child as well as cases where more than one entity can have the same start position
|
||||
// if there are multiple entities in the same start position, then order them by composite, nDepth, regular entity
|
||||
getEntitiesByPositionList(sortedEntitiesList, tokenizedText);
|
||||
updatedText = tokenizedText.join('');
|
||||
}
|
||||
|
||||
// remove duplicated whitespaces between words inside utterance to make sure they are aligned with the luis portal
|
||||
// as luis portal only keeps one whitespace between words even if you type multiple ones
|
||||
// this will benefit the comparison of lu files that are converted from local and remote luis application
|
||||
if(updatedText) fileContent += '- ' + updatedText.replace(/\s+/g, ' ') + NEWLINE;
|
||||
});
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const flattenEntities = function(entity, flatEntities)
|
||||
{
|
||||
if (entity.children !== undefined && Array.isArray(entity.children) && entity.children.length !== 0) {
|
||||
entity.children.forEach(child => flattenEntities(child, flatEntities));
|
||||
}
|
||||
flatEntities.push(Object.assign({}, entity));
|
||||
}
|
||||
|
||||
const getEntitiesByPositionList = function(entitiesList, tokenizedText) {
|
||||
(entitiesList || []).forEach(entity => {
|
||||
// does this entity have child labels?
|
||||
(entity.children || []).forEach(child => {
|
||||
getEntitiesByPositionList(child.children, tokenizedText);
|
||||
updateTokenizedTextByEntity(tokenizedText, child);
|
||||
})
|
||||
updateTokenizedTextByEntity(tokenizedText, entity);
|
||||
})
|
||||
};
|
||||
|
||||
const updateTokenizedTextByEntity = function(tokenizedText, entity) {
|
||||
if (entity.role !== undefined) {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.role}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
} else {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.entity}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
}
|
||||
tokenizedText[parseInt(entity.endPos)] = tokenizedText[parseInt(entity.endPos)] + '}';
|
||||
}
|
||||
|
||||
const parsePredictedResultToLu = function(utterance, luisJSON){
|
||||
let fileContent = ''
|
||||
let updatedText = utterance.text;
|
||||
let intents = []
|
||||
// parse predicted result into the .lu content
|
||||
if(utterance.predictedResult.predictedIntents!==undefined && utterance.predictedResult.predictedIntents.length > 0){
|
||||
for(let intent of utterance.predictedResult.predictedIntents){
|
||||
intents.push(`${intent.intent}(${intent.score})`);
|
||||
}
|
||||
}
|
||||
let passText = utterance.predictedResult.IntentPass? "> PASS." : "> FAIL.";
|
||||
fileContent += passText + " Predicted intent: " + intents.join(', ') + NEWLINE;
|
||||
|
||||
if(utterance.predictedResult.predictedEntities!==undefined) {
|
||||
if (utterance.predictedResult.predictedEntities.length > 0){
|
||||
// update utterance for each entity
|
||||
let text = utterance.text;
|
||||
let sortedEntitiesList = objectSortByStartPos(utterance.predictedResult.predictedEntities);
|
||||
let tokenizedText = text.split('');
|
||||
let nonCompositesInUtterance = sortedEntitiesList.filter(entity => luisJSON.composites.find(composite => composite.name == entity.entity) == undefined);
|
||||
nonCompositesInUtterance.forEach(entity => {
|
||||
if (entity.role !== undefined) {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.role}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
} else {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.entity}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
}
|
||||
tokenizedText[parseInt(entity.endPos)] += `}`;
|
||||
})
|
||||
let compositeEntitiesInUtterance = sortedEntitiesList.filter(entity => luisJSON.composites.find(composite => composite.name == entity.entity) != undefined);
|
||||
compositeEntitiesInUtterance.forEach(entity => {
|
||||
if (entity.role !== undefined) {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.role}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
} else {
|
||||
tokenizedText[parseInt(entity.startPos)] = `{@${entity.entity}=${tokenizedText[parseInt(entity.startPos)]}`;
|
||||
}
|
||||
tokenizedText[parseInt(entity.endPos)] += `}`;
|
||||
})
|
||||
updatedText = tokenizedText.join('');
|
||||
}
|
||||
let passText = utterance.predictedResult.EntityPass ? "> PASS." : "> FAIL.";
|
||||
if(updatedText) fileContent += passText + ' Predicted entities: ' + updatedText + NEWLINE;
|
||||
updatedText = utterance.text;
|
||||
}
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseEntitiesToLu = function(luisJson){
|
||||
let fileContent = ''
|
||||
if(!luisJson.entities) {
|
||||
return fileContent
|
||||
}
|
||||
|
||||
fileContent += '> # Entity definitions' + NEWLINE + NEWLINE;
|
||||
luisJson.entities.forEach(function(entity) {
|
||||
if (!entity.children || entity.children.length === 0) {
|
||||
// Add inherits information if any
|
||||
if (entity.inherits !== undefined) {
|
||||
// > !# @intent.inherits = {name = Web.WebSearch; domain_name = Web; model_name = WebSearch}
|
||||
fileContent += '> !# @entity.inherits = name : ' + entity.name;
|
||||
if (entity.inherits.domain_name !== undefined) {
|
||||
fileContent += '; domain_name : ' + entity.inherits.domain_name;
|
||||
}
|
||||
if (entity.inherits.model_name !== undefined) {
|
||||
fileContent += '; model_name : ' + entity.inherits.model_name;
|
||||
}
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
}
|
||||
fileContent += `@ ${getEntityType(entity.features)} ${writeEntityName(entity.name)}`;
|
||||
fileContent += addRolesAndFeatures(entity);
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
} else {
|
||||
// handle n-depth entity
|
||||
fileContent += handleNDepthEntity(entity);
|
||||
}
|
||||
});
|
||||
fileContent += NEWLINE;
|
||||
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const writeEntityName = function(entityName) {
|
||||
return entityName.includes(' ') ? `"${entityName}"` : `${entityName}`
|
||||
}
|
||||
|
||||
const parseToLuPrebuiltEntities = function(luisJson){
|
||||
let fileContent = ''
|
||||
if(!luisJson.prebuiltEntities){
|
||||
return fileContent
|
||||
}
|
||||
fileContent += '> # PREBUILT Entity definitions' + NEWLINE + NEWLINE;
|
||||
luisJson.prebuiltEntities.forEach(function(entity) {
|
||||
fileContent += `@ prebuilt ${entity.name}`;
|
||||
fileContent += addRolesAndFeatures(entity);
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
});
|
||||
fileContent += NEWLINE;
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseToLuClosedLists = function(luisJson){
|
||||
let fileContent = ''
|
||||
if(!luisJson.closedLists){
|
||||
return fileContent
|
||||
}
|
||||
fileContent += '> # List entities' + NEWLINE + NEWLINE;
|
||||
luisJson.closedLists.forEach(function(ListItem) {
|
||||
fileContent += `@ list `;
|
||||
fileContent += ListItem.name.includes(' ') ? `"${ListItem.name}"` : `${ListItem.name}`;
|
||||
fileContent += addRolesAndFeatures(ListItem);
|
||||
if (ListItem.subLists.length !== 0) {
|
||||
fileContent += ` = `;
|
||||
fileContent += NEWLINE;
|
||||
}
|
||||
ListItem.subLists.forEach(function(list) {
|
||||
fileContent += `\t- ${list.canonicalForm} :`;
|
||||
fileContent += NEWLINE;
|
||||
list.list.forEach(function(listItem) {
|
||||
fileContent += '\t\t- ' + listItem + NEWLINE;
|
||||
});
|
||||
});
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
});
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseRegExEntitiesToLu = function(regex_entities){
|
||||
let fileContent = ''
|
||||
if(!regex_entities) {
|
||||
return fileContent
|
||||
}
|
||||
fileContent += '> # RegEx entities' + NEWLINE + NEWLINE;
|
||||
regex_entities.forEach(function(regExEntity) {
|
||||
fileContent += `@ regex `;
|
||||
fileContent += regExEntity.name.includes(' ') ? `"${regExEntity.name}"` : regExEntity.name;
|
||||
fileContent += addRolesAndFeatures(regExEntity);
|
||||
if (regExEntity.regexPattern !== '') {
|
||||
fileContent += ` = /${regExEntity.regexPattern}/`;
|
||||
}
|
||||
fileContent += NEWLINE;
|
||||
});
|
||||
fileContent += NEWLINE;
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parseCompositesToLu = function(luisJson){
|
||||
let fileContent = ''
|
||||
// add composite entities if found in source LUIS JSON
|
||||
if(!luisJson.composites || luisJson.composites.length <= 0) {
|
||||
return fileContent
|
||||
}
|
||||
fileContent += '> # Composite entities' + NEWLINE + NEWLINE;
|
||||
luisJson.composites.forEach(composite => {
|
||||
fileContent += `@ composite `;
|
||||
fileContent += composite.name.includes(' ') ? `"${composite.name}"` : composite.name;
|
||||
fileContent += addRolesAndFeatures(composite);
|
||||
if (composite.children.length > 0) {
|
||||
fileContent += (typeof composite.children[0] == "object") ? ` = [${composite.children.map(item => item.name).join(', ')}]`: ` = [${composite.children.join(', ')}]`;
|
||||
}
|
||||
fileContent += NEWLINE;
|
||||
})
|
||||
return fileContent
|
||||
}
|
||||
|
||||
const parsePatternAnyEntitiesToLu = function(luisJson){
|
||||
let fileContent = ''
|
||||
if(!luisJson.patternAnyEntities || luisJson.patternAnyEntities.length <= 0) {
|
||||
return fileContent;
|
||||
}
|
||||
fileContent += '> # Pattern.Any entities' + NEWLINE + NEWLINE;
|
||||
luisJson.patternAnyEntities.forEach(patternAnyEntity => {
|
||||
// Add inherits information if any
|
||||
if (patternAnyEntity.inherits !== undefined) {
|
||||
// > !# @intent.inherits = {name = Web.WebSearch; domain_name = Web; model_name = WebSearch}
|
||||
fileContent += '> !# @patternAnyEntity.inherits = name : ' + patternAnyEntity.name;
|
||||
if (patternAnyEntity.inherits.domain_name !== undefined) {
|
||||
fileContent += '; domain_name : ' + patternAnyEntity.inherits.domain_name;
|
||||
}
|
||||
if (patternAnyEntity.inherits.model_name !== undefined) {
|
||||
fileContent += '; model_name : ' + patternAnyEntity.inherits.model_name;
|
||||
}
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
}
|
||||
// For back compat we will only write this if the pattern.any has inherits information.
|
||||
fileContent += `@ patternany `;
|
||||
fileContent += patternAnyEntity.name.includes(' ') ? `"${patternAnyEntity.name}"` : patternAnyEntity.name;
|
||||
fileContent += addRolesAndFeatures(patternAnyEntity);
|
||||
fileContent += NEWLINE;
|
||||
})
|
||||
return fileContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to handle phrase lists both in the new and old property.
|
||||
* @param {Object[]} collection
|
||||
*/
|
||||
const handlePhraseLists = function(collection) {
|
||||
let fileContent = ''
|
||||
if(!collection) {
|
||||
return fileContent
|
||||
}
|
||||
fileContent = '> # Phrase list definitions' + NEWLINE + NEWLINE;
|
||||
collection.forEach(function(entity) {
|
||||
let flags = '';
|
||||
fileContent += `@ phraselist `;
|
||||
fileContent += entity.name.includes(' ') ? `"${entity.name}"` : `${entity.name}`;
|
||||
fileContent += `${(entity.mode ? `(interchangeable)` : ``)}`;
|
||||
if (entity.activated !== undefined && !entity.activated) flags += `disabled`;
|
||||
if (entity.enabledForAllModels !== undefined) {
|
||||
if (entity.enabledForAllModels === true) {
|
||||
flags += (flags !== '') ? `, enabledForAllModels` : `enabledForAllModels`;
|
||||
} else {
|
||||
flags += (flags !== '') ? `, disabledForAllModels` : `disabledForAllModels`;
|
||||
}
|
||||
}
|
||||
if (flags !== '') fileContent += ` ${flags}`;
|
||||
if (entity.words && entity.words !== '') {
|
||||
fileContent += ` = ${NEWLINE}\t- ${entity.words}`;
|
||||
}
|
||||
fileContent += NEWLINE + NEWLINE;
|
||||
});
|
||||
fileContent += NEWLINE;
|
||||
|
||||
return fileContent;
|
||||
}
|
||||
/**
|
||||
* Helper to add application inforamtion metadata
|
||||
* @param {Object} LUISJSON
|
||||
*/
|
||||
const addAppMetaData = function(LUISJSON) {
|
||||
let fileContent = '';
|
||||
if (LUISJSON.name) fileContent += `> !# @app.name = ${LUISJSON.name}` + NEWLINE;
|
||||
if (LUISJSON.desc) fileContent += `> !# @app.desc = ${LUISJSON.desc}` + NEWLINE;
|
||||
if (LUISJSON.versionId) fileContent += `> !# @app.versionId = ${LUISJSON.versionId}` + NEWLINE;
|
||||
if (LUISJSON.culture) fileContent += `> !# @app.culture = ${LUISJSON.culture}` + NEWLINE;
|
||||
if (LUISJSON.luis_schema_version) fileContent += `> !# @app.luis_schema_version = ${LUISJSON.luis_schema_version}` + NEWLINE;
|
||||
if (LUISJSON.settings) {
|
||||
LUISJSON.settings.forEach(setting => {
|
||||
fileContent += `> !# @app.settings.${setting.name} = ${setting.value}` + NEWLINE;
|
||||
})
|
||||
}
|
||||
if (LUISJSON.tokenizerVersion) fileContent += `> !# @app.tokenizerVersion = ${LUISJSON.tokenizerVersion}` + NEWLINE;
|
||||
return fileContent === '' ? fileContent : `> LUIS application information` + NEWLINE + fileContent + NEWLINE + NEWLINE;
|
||||
}
|
||||
/**
|
||||
* Helper function to handle nDepth entity definition
|
||||
* @param {Object} entity
|
||||
*/
|
||||
const handleNDepthEntity = function(entity) {
|
||||
let fileContent = '';
|
||||
const BASE_TAB_STOP = 1;
|
||||
fileContent += `@ ${getEntityType(entity.features)} ${writeEntityName(entity.name)}`;
|
||||
fileContent += addRolesAndFeatures(entity);
|
||||
fileContent += NEWLINE;
|
||||
fileContent += addNDepthChildDefinitions(entity.children, BASE_TAB_STOP, fileContent) + NEWLINE + NEWLINE
|
||||
return fileContent;
|
||||
}
|
||||
/**
|
||||
* Recursive helper function to add child definitions.
|
||||
* @param {Object[]} childCollection
|
||||
* @param {number} tabStop
|
||||
* @param {string} fileContent
|
||||
*/
|
||||
const addNDepthChildDefinitions = function(childCollection, tabStop, fileContent) {
|
||||
let myFileContent = '';
|
||||
(childCollection || []).forEach(child => {
|
||||
myFileContent += "".padStart(tabStop * 4, ' ');
|
||||
myFileContent += `- @ ${getEntityType(child.features)} ${writeEntityName(child.name)}`;
|
||||
myFileContent += addRolesAndFeatures(child);
|
||||
myFileContent += NEWLINE;
|
||||
if (child.children && child.children.length !== 0) {
|
||||
myFileContent += addNDepthChildDefinitions(child.children, tabStop + 1, myFileContent);
|
||||
}
|
    });
    return myFileContent;
}

const getEntityType = function(features) {
    // find constraint
    let constraint = (features || []).find(feature => feature.isRequired == true);
    if (constraint !== undefined) {
        return constraint.modelName;
    } else {
        return EntityTypeEnum.ML;
    }
}

/**
 * Helper to construct the roles and features list for an entity
 * @param {Object} entity
 * @returns {String} file content to include.
 */
const addRolesAndFeatures = function(entity) {
    let roleAndFeatureContent = ''
    if (entity.roles && entity.roles.length > 0) {
        roleAndFeatureContent += ` ${entity.roles.length > 1 ? `hasRoles` : `hasRole`} `;
        entity.roles.forEach(item => {
            roleAndFeatureContent += item.includes(' ') ? `"${item}",` : `${item},`;
        })
    }
    roleAndFeatureContent = roleAndFeatureContent.substring(0, roleAndFeatureContent.length - 1);
    if (!entity.features || entity.features.length <= 0) {
        return roleAndFeatureContent
    }

    let featuresList = new Array();
    entity.features.forEach(item => {
        if (item.featureName) featuresList.push(item.featureName);
        if (item.modelName) {
            // a model used as a required feature (constraint) defines the entity's type,
            // so only optional model features are emitted in the usesFeature(s) list.
            if (item.isRequired !== undefined) {
                if (item.isRequired !== true)
                    featuresList.push(item.modelName);
            } else {
                featuresList.push(item.modelName);
            }
        }
    })
    if (featuresList.length > 0) {
        roleAndFeatureContent += ` ${featuresList.length > 1 ? `usesFeatures` : `usesFeature`} `;
        featuresList.forEach(feature => {
            roleAndFeatureContent += feature.includes(' ') ? `"${feature}",` : `${feature},`;
        });
        roleAndFeatureContent = roleAndFeatureContent.substring(0, roleAndFeatureContent.length - 1);
    }

    return roleAndFeatureContent
}

/**
 * Helper function to add utterances to a collection if they do not already exist
 * @param {Object[]} srcCollection source collection of utterance objects
 * @param {Object[]} tgtCollection target collection of utterance objects
 * @param {string} attribute attribute to check on and copy over
 * @returns {void}
 */
const updateUtterancesList = function (srcCollection, tgtCollection, attribute) {
    (srcCollection || []).forEach(srcItem => {
        let matchInTarget = tgtCollection.find(item => item.intent.name == srcItem.intent);
        if(!matchInTarget || matchInTarget.utterances.length === 0) {
            addUtteranceToCollection(attribute, srcItem, matchInTarget);
            return;
        }
        if(!matchInTarget.utterances.find(item => item.text == srcItem[attribute])) {
            addUtteranceToCollection(attribute, srcItem, matchInTarget);
            return;
        }
    });
}

/**
 * Helper function to add an utterance to a collection based on src type (pattern or utterance)
 * @param {string} attribute attribute to check on and copy over
 * @param {Object} srcItem source object
 * @param {Object} matchInTarget matching target intent object
 * @returns {void}
 */
const addUtteranceToCollection = function (attribute, srcItem, matchInTarget) {
    if(attribute === 'text') {
        matchInTarget.utterances.push(srcItem);
    } else {
        matchInTarget.utterances.push(new helperClasses.uttereances(srcItem.pattern.replace('{', '{@'), srcItem.intent, []));
    }
}

/**
 * Helper function to sort an entities list by starting position
 * @param {Object[]} objectArray array of entity objects
 * @returns {Object[]} entities array sorted by start position
 */
const objectSortByStartPos = function (objectArray) {
    let ObjectByStartPos = objectArray.slice(0);
    ObjectByStartPos.sort(function(a, b) {
        if (a.startPos === b.startPos)
            return a.endPos - b.endPos;
        return a.startPos - b.startPos;
    });
    return ObjectByStartPos;
}

module.exports = luisToLuContent
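
For reference, a worked example (not part of the commit) of what addRolesAndFeatures emits; the entity shape below is an assumption based on the properties the function reads:

const entity = {
    roles: ['fromCity', 'toCity'],
    features: [{ featureName: 'cityPhraseList' }, { modelName: 'geographyV2', isRequired: false }]
};
// addRolesAndFeatures(entity) returns:
//   ' hasRoles fromCity,toCity usesFeatures cityPhraseList,geographyV2'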
@ -0,0 +1,72 @@
const validator = require('./luisValidator')
const luConverter = require('./luConverter')
const helpers = require('./../utils/helpers')
const LU = require('./../lu/lu')

class Luis {
    constructor(LuisJSON = null){
        this.intents = [];
        this.entities = [];
        this.composites = [];
        this.closedLists = [];
        this.regex_entities = [];
        this.model_features = [];
        this.regex_features = [];
        this.utterances = [];
        this.patterns = [];
        this.patternAnyEntities = [];
        this.prebuiltEntities = [];
        // fix for #255
        this.luis_schema_version = "3.2.0";
        this.versionId = "0.1";
        this.name = "";
        this.desc = "";
        this.culture = "en-us";

        if (LuisJSON) {
            initialize(this, LuisJSON)
        }
    }

    parseToLuContent(){
        helpers.checkAndUpdateVersion(this)
        helpers.cleanUpExplicitEntityProperty(this)
        return luConverter(this)
    }

    parseToLU(){
        return new LU(this.parseToLuContent())
    }

    validate() {
        return validator(this)
    }
}

module.exports = Luis

const initialize = function(instance, LuisJSON) {
    for (let prop in instance) {
        instance[prop] = LuisJSON[prop];
    }

    // add the regexEntities property returned by the LUIS export API
    const regexEntities = 'regexEntities';
    if (Object.keys(LuisJSON).includes(regexEntities)) instance[regexEntities] = LuisJSON[regexEntities];

    settingsAndTokenizerCheck(instance, LuisJSON)

    initializeEntities(instance)
}

const initializeEntities = function (instance) {
    (instance.entities || []).forEach(e => e.explicitlyAdded = true)
}

const settingsAndTokenizerCheck = function(instance, LuisJSON) {
    const adds = ['tokenizerVersion', 'settings', 'phraselists']
    for (let i = 0; i < adds.length; i++) {
        if(!LuisJSON[adds[i]]) continue
        instance[adds[i]] = LuisJSON[adds[i]];
    }
}
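
A minimal round-trip sketch (illustrative only; `luisAppJson` stands in for a previously exported LUIS app and is not defined in this commit):

const Luis = require('./luis')

const luis = new Luis(luisAppJson);        // luisAppJson is an assumed, pre-exported app JSON
const luContent = luis.parseToLuContent(); // re-emit the model as .lu text via luConverter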
@ -0,0 +1,60 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const Luis = require('./luis')
const parseFileContents = require('./../lufile/parseFileContents').parseFile
const build = require('./luisCollate').build
const LU = require('./../lu/lu')

class LuisBuilder {

    /**
     * Builds a Luis instance from a LUIS json.
     * @param {JSON} luisJson LUIS json
     * @returns {Luis} new Luis instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static fromJson(luisJson) {
        return new Luis(luisJson)
    }

    /**
     * Builds a Luis instance from LU content.
     * @param {string} luContent LU content
     * @returns {Luis} new Luis instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromContentAsync(luContent) {
        return await parseAndValidateLuFile(luContent)
    }

    /**
     * Builds a Luis instance from an LU list.
     * @param {Array<LU>} luArray Array of LU files to be merged
     * @param {function} luSearchFn function to retrieve the lu files found in the references
     * @returns {Luis} new Luis instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromLUAsync(luArray, luSearchFn) {
        if(!Array.isArray(luArray)){
            if (luArray instanceof LU)
                luArray = new Array(luArray)
            else
                luArray = new Array(new LU(luArray))
        }
        let parsedContent = await build(luArray, false, '', luSearchFn)
        parsedContent.validate()
        return parsedContent
    }

}

module.exports = LuisBuilder

const parseAndValidateLuFile = async function(luContent, log = undefined, culture = undefined) {
    let parsedContent = await parseFileContents(luContent, log, culture)
    let LUISObj = new Luis(parsedContent.LUISJsonStructure)
    LUISObj.validate()
    return LUISObj
}
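
A minimal usage sketch (illustrative, not part of the commit; run inside an async function). The LU snippet and the resulting intent shape are assumptions based on what parseFileContents produces:

const LuisBuilder = require('./luisBuilder')

// Parse inline LU content into a validated Luis instance.
const luis = await LuisBuilder.fromContentAsync('# Greeting\n- hello\n- hi there');
// luis.intents is expected to include a 'Greeting' intent; luis.utterances the two examples.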
@ -0,0 +1,412 @@
const deepEqual = require('deep-equal')
const LUISObjNameEnum = require('./../utils/enums/luisobjenum')
const Luis = require('./luis')
const helpers = require('./../utils/helpers')
const mergeLuFiles = require('./../lu/luMerger').Build
const exception = require('./../utils/exception')
const retCode = require('../utils/enums/CLI-errors')

/**
 * Builds a Luis instance from an LU list.
 * @param {Array<Lu>} luArray Array of LU files to be merged
 * @param {boolean} verbose indicates if we need verbose logging.
 * @param {string} luis_culture LUIS locale code
 * @param {function} luSearchFn function to retrieve the lu files found in the references
 * @returns {Luis} new Luis instance
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const build = async function(luArray, verbose, luis_culture, luSearchFn) {
    let mergedContent = await mergeLuFiles(luArray, verbose, luis_culture, luSearchFn)
    let parsedLUISList = mergedContent.LUISContent.filter(item => item.includeInCollate)
    if (parsedLUISList.length === 0) return new Luis()
    let luisList = []
    parsedLUISList.forEach(i => {
        luisList.push(i.LUISJsonStructure)
    });
    return collate(luisList)
}

/**
 * Collates a list of Luis instances into one.
 * @param {Array<Luis>} luisList Array of Luis instances to be collated
 * @returns {Luis} collated Luis instance
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const collate = function(luisList) {
    if (luisList.length === 0) return
    let luisObject = new Luis(luisList[0])
    let hashTable = {};
    initializeHash(luisObject, hashTable)
    for(let i = 1; i < luisList.length; i++) {
        let blob = luisList[i]
        mergeResults(blob, luisObject, LUISObjNameEnum.INTENT);
        mergeResults(blob, luisObject, LUISObjNameEnum.ENTITIES);
        mergeNDepthEntities(blob.entities, luisObject.entities);
        mergeResults_closedlists(blob, luisObject, LUISObjNameEnum.CLOSEDLISTS);
        mergeResults(blob, luisObject, LUISObjNameEnum.PATTERNANYENTITY);
        mergeResultsWithHash(blob, luisObject, LUISObjNameEnum.UTTERANCE, hashTable);
        mergeResultsWithHash(blob, luisObject, LUISObjNameEnum.PATTERNS, hashTable);
        buildRegex(blob, luisObject)
        buildPrebuiltEntities(blob, luisObject)
        buildModelFeatures(blob, luisObject)
        buildComposites(blob, luisObject)
        buildPatternAny(blob, luisObject)
    }
    helpers.checkAndUpdateVersion(luisObject)
    helpers.cleanUpExplicitEntityProperty(luisObject)
    cleanupEntities(luisObject)
    return luisObject
}

module.exports = {
    collate,
    build
}

const cleanupEntities = function(luisObject) {
    let consolidatedList = [];
    luisObject.composites.forEach(item => consolidatedList.push(item));
    luisObject.closedLists.forEach(item => consolidatedList.push(item));
    luisObject.regex_entities.forEach(item => consolidatedList.push(item));
    luisObject.prebuiltEntities.forEach(item => consolidatedList.push(item));
    let idxToRemove = [];
    luisObject.entities.forEach((item, idx) => {
        if (consolidatedList.find(e => e.name == item.name) !== undefined) idxToRemove.push(idx);
    })
    // splice from the end so earlier indices stay valid
    idxToRemove.sort((a, b) => b - a).forEach(idx => luisObject.entities.splice(idx, 1))
}

const mergeResultsWithHash = function (blob, finalCollection, type, hashTable) {
    if (blob[type] === undefined || blob[type].length === 0) {
        return
    }
    blob[type].forEach(function (blobItem) {
        // add this item if it does not already exist, using a hash look-up.
        let hashCode = helpers.hashCode(JSON.stringify(blobItem));
        if (!hashTable[hashCode]) {
            finalCollection[type].push(blobItem);
            hashTable[hashCode] = blobItem;
        } else {
            let item = hashTable[hashCode];

            if (type !== LUISObjNameEnum.INTENT &&
                type !== LUISObjNameEnum.PATTERNS &&
                type !== LUISObjNameEnum.UTTERANCE &&
                item.name === blobItem.name) {
                // merge roles
                (blobItem.roles || []).forEach(blobRole => {
                    if (item.roles &&
                        !item.roles.includes(blobRole)) {
                        item.roles.push(blobRole);
                    }
                });
            }
        }
    });
}

const mergeNDepthEntities = function (blob, finalCollection) {
    let nDepthInBlob = (blob || []).filter(x => x.children !== undefined && Array.isArray(x.children) && x.children.length !== 0);
    nDepthInBlob.forEach(item => {
        let itemExistsInFinal = (finalCollection || []).find(x => x.name == item.name);
        if (itemExistsInFinal === undefined) {
            finalCollection.push(item);
        } else {
            // de-dupe and merge roles
            (item.roles || []).forEach(r => {
                if (itemExistsInFinal.roles === undefined) {
                    itemExistsInFinal.roles = [r];
                } else {
                    if (!itemExistsInFinal.roles.includes(r)) {
                        itemExistsInFinal.roles.push(r);
                    }
                }
            })
            // de-dupe and merge children
            if (item.children !== undefined && Array.isArray(item.children) && item.children.length !== 0) {
                recursivelyMergeChildrenAndFeatures(item.children, itemExistsInFinal.children)
            }
        }
    })
}

const recursivelyMergeChildrenAndFeatures = function(srcChildren, tgtChildren) {
    if (tgtChildren === undefined || !Array.isArray(tgtChildren) || tgtChildren.length === 0) {
        // note: this reassignment is local to the function and does not
        // propagate back to the caller's object.
        tgtChildren = srcChildren;
        return;
    }
    (srcChildren || []).forEach(item => {
        // find child in tgt
        let itemExistsInFinal = (tgtChildren || []).find(x => x.name == item.name);
        if (itemExistsInFinal === undefined) {
            tgtChildren.push(item);
        } else {
            // merge features
            if (item.features !== undefined && item.features.length !== 0) {
                // merge and verify type
                let typeForFinalItem = (itemExistsInFinal.features || []).find(t => t.isRequired == true);
                let typeForItem = (item.features || []).find(t1 => t1.isRequired == true);
                if (typeForFinalItem !== undefined) {
                    if (typeForItem !== undefined) {
                        if (typeForFinalItem.modelName !== typeForItem.modelName) {
                            throw new exception(retCode.errorCode.INVALID_REGEX_ENTITY, `Child entity ${item.name} does not have a consistent type definition. Please verify all definitions for this entity.`)
                        }
                    }
                }
                item.features.forEach(f => {
                    let featureInFinal = (itemExistsInFinal.features || []).find(itFea => {
                        return ((itFea.featureName !== undefined && itFea.featureName == f.featureName) ||
                            (itFea.modelName !== undefined && itFea.modelName == f.modelName))
                    });
                    if (featureInFinal === undefined) {
                        itemExistsInFinal.features.push(f);
                    } else {
                        // throw if isRequired is not the same.
                        if (featureInFinal.isRequired !== f.isRequired) {
                            throw new exception(retCode.errorCode.INVALID_REGEX_ENTITY, `Feature ${f.featureName} does not have a consistent definition for entity ${item.name}. Please verify all definitions for this feature for this entity.`)
                        }
                    }
                })
            }
            // de-dupe and merge children
            if (item.children !== undefined && Array.isArray(item.children) && item.children.length !== 0) {
                recursivelyMergeChildrenAndFeatures(item.children, itemExistsInFinal.children)
            }
        }
    })
}

/**
 * Helper function to merge an item if it does not already exist
 *
 * @param {object} blob Contents of all parsed file blobs
 * @param {object} finalCollection Reference to the final collection of items
 * @param {LUISObjNameEnum} type Enum type of possible LUIS object types
 * @returns {void} Nothing
 */
const mergeResults = function (blob, finalCollection, type) {
    if (blob[type] === undefined || blob[type].length === 0) {
        return
    }
    blob[type].forEach(function (blobItem) {
        if (finalCollection[type].length === 0) {
            finalCollection[type].push(blobItem);
            return;
        }
        // add this item if it does not already exist in the final collection
        let itemExists = false;
        for (let fIndex in finalCollection[type]) {
            if (deepEqual(finalCollection[type][fIndex], blobItem)) {
                itemExists = true;
                break;
            }

            // if the item name matches, merge roles if available for everything other than intent
            if (type === LUISObjNameEnum.INTENT ||
                type === LUISObjNameEnum.PATTERNS ||
                type === LUISObjNameEnum.UTTERANCE ||
                finalCollection[type][fIndex].name !== blobItem.name) {
                continue;
            }

            itemExists = true;
            (blobItem.roles || []).forEach(blobRole => {
                if (finalCollection[type][fIndex].roles &&
                    !finalCollection[type][fIndex].roles.includes(blobRole)) {
                    finalCollection[type][fIndex].roles.push(blobRole);
                }
            });
        }
        if (!itemExists) {
            finalCollection[type].push(blobItem);
        }
    });
}

/**
 * Helper function to merge a closed list item if it does not already exist
 *
 * @param {object} blob Contents of all parsed file blobs
 * @param {object} finalCollection Reference to the final collection of items
 * @param {LUISObjNameEnum} type Enum type of possible LUIS object types
 * @returns {void} nothing
 */
const mergeResults_closedlists = function (blob, finalCollection, type) {
    if (blob[type] === undefined || blob[type].length === 0) {
        return
    }

    blob[type].forEach(function (blobItem) {
        let listInFinal = helpers.filterMatch(finalCollection[type], 'name', blobItem.name);
        if (listInFinal.length === 0) {
            finalCollection[type].push(blobItem);
        } else {
            blobItem.subLists.forEach(function (blobSLItem) {
                // see if there is a sublist match in listInFinal
                let slInFinal = helpers.filterMatch(listInFinal[0].subLists, 'canonicalForm', blobSLItem.canonicalForm);
                if (slInFinal.length === 0) {
                    listInFinal[0].subLists.push(blobSLItem);
                } else {
                    // there is a canonical form match. See if the values all exist
                    blobSLItem.list.forEach(function (listItem) {
                        if (!slInFinal[0].list.includes(listItem)) slInFinal[0].list.push(listItem);
                    })
                }
            });

            // merge roles if they are different
            (blobItem.roles || []).forEach(blobRole => {
                if (!listInFinal[0].roles.includes(blobRole)) {
                    listInFinal[0].roles.push(blobRole);
                }
            })
        }
    });
}

const buildRegex = function(blob, FinalLUISJSON){
    // do we have regex entities here?
    if (blob.regex_entities === undefined || blob.regex_entities.length === 0) {
        return
    }
    blob.regex_entities.forEach(function (regexEntity) {
        // do we have the same entity in final?
        let entityExistsInFinal = (FinalLUISJSON.regex_entities || []).find(item => item.name == regexEntity.name);
        if (entityExistsInFinal === undefined) {
            FinalLUISJSON.regex_entities.push(regexEntity);
        } else {
            // verify that the pattern is the same
            if (entityExistsInFinal.regexPattern !== regexEntity.regexPattern) {
                throw (new exception(retCode.errorCode.INVALID_REGEX_ENTITY, `[ERROR]: RegEx entity : ${regexEntity.name} has inconsistent pattern definitions. \n 1. ${regexEntity.regexPattern} \n 2. ${entityExistsInFinal.regexPattern}`));
            }
            // merge roles
            if (entityExistsInFinal.roles.length > 0) {
                (regexEntity.roles || []).forEach(function (role) {
                    if (!entityExistsInFinal.roles.includes(role))
                        entityExistsInFinal.roles.push(role);
                })
            }
        }
    })
}

const buildPrebuiltEntities = function(blob, FinalLUISJSON){
    // do we have prebuiltEntities here?
    if (blob.prebuiltEntities === undefined || blob.prebuiltEntities.length === 0) {
        return
    }
    blob.prebuiltEntities.forEach(function (prebuiltEntity) {
        let prebuiltTypeExists = false;
        for (let fIndex in FinalLUISJSON.prebuiltEntities) {
            if (prebuiltEntity.name === FinalLUISJSON.prebuiltEntities[fIndex].name) {
                // do we have all the roles? if not, merge the roles
                prebuiltEntity.roles.forEach(function (role) {
                    if (!FinalLUISJSON.prebuiltEntities[fIndex].roles.includes(role)) {
                        FinalLUISJSON.prebuiltEntities[fIndex].roles.push(role);
                    }
                });
                prebuiltTypeExists = true;
                break;
            }
        }
        if (!prebuiltTypeExists) {
            FinalLUISJSON.prebuiltEntities.push(prebuiltEntity);
        }
    });
}

const buildModelFeatures = function(blob, FinalLUISJSON){
    // find which scope to use in blob
    let blobScope = blob.model_features || blob.phraselists || [];
    if (blobScope.length === 0) return;

    // find the FinalLUISJSON scope to use
    let finalScope = FinalLUISJSON.model_features || FinalLUISJSON.phraselists;

    blobScope.forEach(function (modelFeature) {
        let modelFeatureInMaster = helpers.filterMatch(finalScope, 'name', modelFeature.name);
        if (modelFeatureInMaster.length === 0) {
            finalScope.push(modelFeature);
        } else {
            if (modelFeatureInMaster[0].mode !== modelFeature.mode) {
                // error.
                throw (new exception(retCode.errorCode.INVALID_INPUT, '[ERROR]: Phrase list : "' + modelFeature.name + '" has conflicting definitions. One marked interchangeable and another not interchangeable'));
            } else {
                modelFeature.words.split(',').forEach(function (word) {
                    if (!modelFeatureInMaster[0].words.includes(word)) modelFeatureInMaster[0].words += "," + word;
                })
            }
        }
    });
}

const buildComposites = function(blob, FinalLUISJSON){
    if (blob.composites === undefined) return;
    // do we have composites? collate them correctly
    (blob.composites || []).forEach(composite => {
        let compositeInMaster = helpers.filterMatch(FinalLUISJSON.composites, 'name', composite.name);
        if (compositeInMaster.length === 0) {
            FinalLUISJSON.composites.push(composite);
        } else {
            if (JSON.stringify(composite.children.sort()) !== JSON.stringify(compositeInMaster[0].children.sort())) {
                composite.children.forEach(child => {
                    if (!compositeInMaster[0].children.includes(child)) compositeInMaster[0].children.push(child)
                })
            } else {
                // merge roles
                (composite.roles || []).forEach(blobRole => {
                    if (!compositeInMaster[0].roles.includes(blobRole)) {
                        compositeInMaster[0].roles.push(blobRole);
                    }
                })
            }
        }
    });
}

const buildPatternAny = function(blob, FinalLUISJSON){
    if (blob.patternAnyEntities === undefined) return;
    // do we have pattern.any entities here?
    (blob.patternAnyEntities || []).forEach(patternAny => {
        let paIdx = -1;
        let patternAnyInMaster = FinalLUISJSON.patternAnyEntities.find((item, idx) => {
            if (item.name === patternAny.name) {
                paIdx = idx;
                return true;
            }
            return false;
        });
        // verify that this patternAny entity does not exist as any other type
        let simpleEntityInMaster = FinalLUISJSON.entities.find(item => item.name == patternAny.name);
        let compositeInMaster = FinalLUISJSON.composites.find(item => item.name == patternAny.name);
        let listEntityInMaster = FinalLUISJSON.closedLists.find(item => item.name == patternAny.name);
        let regexEntityInMaster = FinalLUISJSON.regex_entities.find(item => item.name == patternAny.name);
        let prebuiltInMaster = FinalLUISJSON.prebuiltEntities.find(item => item.name == patternAny.name);
        if (!simpleEntityInMaster &&
            !compositeInMaster &&
            !listEntityInMaster &&
            !regexEntityInMaster &&
            !prebuiltInMaster) {
            if (patternAnyInMaster) {
                (patternAny.roles || []).forEach(role => !patternAnyInMaster.roles.includes(role) ? patternAnyInMaster.roles.push(role) : undefined);
            } else {
                FinalLUISJSON.patternAnyEntities.push(patternAny);
            }
        } else {
            // remove the pattern.any from master if another entity type has this name.
            if (patternAnyInMaster && paIdx !== -1) {
                FinalLUISJSON.patternAnyEntities.splice(paIdx, 1);
            }
        }
    })
}

const initializeHash = function(LuisJSON, hashTable = undefined) {
    for (let prop in LuisJSON) {
        if (hashTable !== undefined && (prop === LUISObjNameEnum.UTTERANCE || prop === LUISObjNameEnum.PATTERNS)) {
            (LuisJSON[prop] || []).forEach(item => hashTable[helpers.hashCode(JSON.stringify(item))] = item)
        }
    }
}
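
Illustrative only (assuming the helpers tolerate the default Luis shape): collating two blobs that define the same simple entity merges the roles onto the first definition:

const Luis = require('./luis')
const { collate } = require('./luisCollate')

const a = new Luis(), b = new Luis();
a.entities.push({ name: 'city', roles: ['fromCity'] });
b.entities.push({ name: 'city', roles: ['toCity'] });

const merged = collate([a, b]);
// merged.entities[0].roles now contains both 'fromCity' and 'toCity'.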
@ -0,0 +1,32 @@
const Luis = require('./luis')
const propertyHelper = require('./propertyHelper')

class LuisGen extends Luis{
    constructor(){
        super()
    }

    getInstancesList() {
        const prebuiltEntities = [];
        const composites = [];
        this.prebuiltEntities.forEach(entityList => {
            prebuiltEntities.push(...entityList);
        });
        this.composites.forEach(composite => {
            composites.push(composite.compositeName);
            composites.push(...composite.attributes);
        });
        const entities = [
            ...this.entities,
            ...prebuiltEntities,
            ...this.closedLists,
            ...this.regex_entities,
            ...this.patternAnyEntities,
            ...composites
        ].map(entity => propertyHelper.jsonPropertyName(entity)).sort();
        // de-dupe while preserving the sorted order
        return [...new Set(entities)];
    }
}

module.exports = LuisGen
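
A small worked sketch (inputs are illustrative) of how getInstancesList() flattens and de-dupes the instance names:

const LuisGen = require('./luisGen')

const gen = new LuisGen();
gen.entities.push('from city');             // hypothetical simple entity
gen.prebuiltEntities.push(['datetimeV2']);  // prebuilt entities are stored as lists of names
gen.getInstancesList();                     // ['datetime', 'from_city'] after jsonPropertyName + sort + de-dupe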
@ -0,0 +1,185 @@
const LuisGen = require('./luisGen')
const propertyHelper = require('./propertyHelper')
const error = require('./../utils/exception')
const retCode = require('./../utils/enums/CLI-errors')

class LuisGenBuilder {
    static build(luisApp) {
        let buildWithVersion;
        // note: luis_schema_version is compared as a string, which works for single-digit major versions.
        if (luisApp.luis_schema_version < "5") {
            buildWithVersion = buildUpToVersion4;
        } else if (luisApp.luis_schema_version >= "6.0.0") {
            buildWithVersion = buildVersion6;
        } else {
            throw (new error(retCode.errorCode.INVALID_INPUT_FILE, "Invalid LUIS JSON schema version."))
        }

        return buildWithVersion(luisApp);
    }
}

module.exports = LuisGenBuilder

const buildUpToVersion4 = function(luisApp) {
    let result = new LuisGen()
    try {
        result.intents = processIntents(luisApp.intents);
        result.entities = extractEntities(luisApp.entities);
        result.prebuiltEntities = extractEntities(luisApp.prebuiltEntities, true);
        result.closedLists = extractEntities(luisApp.closedLists);
        result.regex_entities = extractEntities(luisApp.regex_entities);
        result.patternAnyEntities = extractEntities(luisApp.patternAnyEntities);
        result.composites = extractComposites(luisApp.composites);
    } catch (err) {
        throw (new error(retCode.errorCode.INVALID_INPUT_FILE, "Invalid LUIS JSON file content."))
    }
    return result
}

const buildVersion6 = function(luisApp) {
    let result = new LuisGen()
    try {
        result.intents = processIntents(luisApp.intents);
        result.closedLists = extractEntities(luisApp.closedLists);
        [result.entities, result.composites] = extractEntitiesV6(luisApp.entities, result.closedLists);
        result.prebuiltEntities = extractEntities(luisApp.prebuiltEntities, true);
        result.regex_entities = extractEntities(luisApp.regex_entities);
        result.patternAnyEntities = extractEntities(luisApp.patternAnyEntities);
    } catch (err) {
        throw (new error(retCode.errorCode.INVALID_INPUT_FILE, "Invalid LUIS JSON file content."))
    }
    return result
}

const processIntents = function(intents) {
    const result = [];
    intents.forEach((intent) => {
        result.push(propertyHelper.normalizeName(intent.name));
    });
    return result;
}

const extractComposites = function(entities) {
    const result = [];
    entities.forEach(entity => {
        const composite = { compositeName: propertyHelper.normalizeName(entity.name), attributes: [] };
        entity.roles.sort();
        entity.roles.forEach(role => {
            composite.attributes.push(role);
        });
        entity.children.forEach(child => {
            composite.attributes.push(child);
        });
        result.push(composite);
    });
    return result;
}

const extractEntities = function(entities, builtIn = false) {
    const result = [];
    entities.forEach(entity => {
        const aux = [];
        aux.push(entity.name);
        entity.roles.sort();
        entity.roles.forEach(role => {
            aux.push(role);
        });
        if (entity.children !== undefined) {
            entity.children.forEach(child => {
                aux.push(child);
            });
        }
        if (builtIn) {
            result.push(aux);
        } else {
            result.push(...aux);
        }
    });
    return result;
}

const extractEntitiesV6 = function(entities, closedLists) {
    // This method provides a simplified topological sort to
    // solve potential instanceOf dependencies in the v6 entities

    const simpleEntitiesResult = [];
    const compositeEntitiesResult = [];
    const simpleEntitiesWithType = {};

    // Add 'closedList' entities as valid types for instanceOf
    closedLists.forEach(listEntity => {
        simpleEntitiesWithType[listEntity] = 'list'
    });

    const resolveEntityType = function(entityName) {
        const entityStack = [];
        let entityType = simpleEntitiesWithType[entityName];

        while (simpleEntitiesWithType[entityType]){
            entityStack.push(entityName);
            entityName = entityType;
            entityType = simpleEntitiesWithType[entityName];
        }

        while (entityName) {
            simpleEntitiesWithType[entityName] = entityType;
            entityName = entityStack.pop();
        }
    }

    const firstPassStack = entities.slice();

    while(firstPassStack.length) {
        const entity = firstPassStack.pop();

        if (Array.isArray(entity.children) && entity.children.length) {
            firstPassStack.push(...entity.children);
        } else if (!entity.children || (Array.isArray(entity.children) && entity.children.length == 0)) {
            // is simple entity
            if (entity.instanceOf) {
                // If the entity order in the schema was not modified by hand,
                // this algorithm will solve instanceOf dependencies.
                const last_type = simpleEntitiesWithType[entity.instanceOf] || entity.instanceOf;
                simpleEntitiesWithType[entity.name] = last_type;
            }
        } else {
            throw (new error(retCode.errorCode.INVALID_INPUT_FILE, "Malformed JSON: entity.children should be an array"));
        }
    }

    // This is a second pass for simple entities.
    // If the JSON was modified by hand and there's a dependency
    // in the instanceOf field to an entity that appears later,
    // the type won't be resolved correctly with one pass.
    for (const entityName in simpleEntitiesWithType) {
        resolveEntityType(entityName);
    }

    const processSimpleEntity = function(entity, listToAppend) {
        listToAppend.push(
            entity.instanceOf ? {name: entity.name, instanceOf: simpleEntitiesWithType[entity.instanceOf] || entity.instanceOf} : entity.name
        )
    }

    const baseParseEntity = function(entityList, childList, topLevel = false) {
        entityList.forEach(entity => {
            if (Array.isArray(entity.children) && entity.children.length) {
                const compositeEntity = { compositeName: propertyHelper.normalizeName(entity.name), attributes: [] };
                baseParseEntity(entity.children, compositeEntity.attributes);
                compositeEntitiesResult.push(compositeEntity);
                if (!topLevel) {
                    childList.push({name: entity.name, compositeInstanceOf: true})
                }
            } else {
                processSimpleEntity(
                    entity,
                    topLevel ? simpleEntitiesResult : childList
                )
            }
        });
    }

    baseParseEntity(entities, null, true);
    return [simpleEntitiesResult, compositeEntitiesResult];
}
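
Usage sketch (illustrative; `luisAppJson` is an assumed exported LUIS app in schema v4 or v6, not defined in this commit):

const LuisGenBuilder = require('./luisGenBuilder')

const gen = LuisGenBuilder.build(luisAppJson);
const instances = gen.getInstancesList(); // sorted, de-duplicated instance property names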
@ -0,0 +1,380 @@
const retCode = require('./../utils/enums/CLI-errors')
const helpers = require('./../utils/helpers')
const exception = require('./../utils/exception')
const BuildDiagnostic = require('./../lufile/diagnostic').BuildDiagnostic;

const validateLUIS = function(luisJSON) {
    // look for entity name collisions - list, simple, patternAny, phraselist
    // look for list entities labelled
    // look for prebuilt entity labels in utterances

    let entitiesList = [];
    let entityFound = '';
    // add entities to the entities list
    addEntities(luisJSON, entitiesList)
    // add closed lists to the entities list
    addClosedLists(luisJSON, entitiesList, entityFound)
    // add pattern.any entities to the entities list
    addPatternAnyEntities(luisJSON, entitiesList, entityFound)
    // add regex entities to the entities list.
    addregexEntities(luisJSON, entitiesList, entityFound)
    // add any composite entities to the entities list.
    addCompositeEntities(luisJSON, entitiesList, entityFound)
    // add any pre-built entities to the entities list.
    addPrebuiltEntities(luisJSON, entitiesList, entityFound)
    // for each entity found, see if there are duplicate definitions
    validateEntities(entitiesList)
    // do we have utterances with phraselist entities?
    validateUtterances(luisJSON, entitiesList)
    // validate composite entities
    validateComposites(luisJSON, entitiesList)
    // do boundary validation
    validateBoundaries(luisJSON);
    // validate feature assignments
    validateFeatureAssignments(luisJSON);
    return true;
}

const validateBoundaries = function(luisJSON) {
    // boundaries documented here - https://docs.microsoft.com/en-us/azure/cognitive-services/luis/luis-boundaries

    // intents: 500 per application: 499 custom intents, and the required None intent.
    if (luisJSON.intents.length > retCode.boundaryLimits.MAX_NUM_INTENTS) {
        validationError(retCode.errorCode.BOUNDARY_INTENTS, `${luisJSON.intents.length} intents found in application. At most ${retCode.boundaryLimits.MAX_NUM_INTENTS} is allowed.`)
    }

    // utterances: 15,000 per application - there is no limit on the number of utterances per intent
    if (luisJSON.utterances.length > retCode.boundaryLimits.MAX_NUM_UTTERANCES) {
        validationError(retCode.errorCode.BOUNDARY_UTTERANCES, `${luisJSON.utterances.length} utterances found in application. At most ${retCode.boundaryLimits.MAX_NUM_UTTERANCES} is allowed.`)
    }

    // pattern.any entities - 100 per application
    if (luisJSON.patternAnyEntities.length > retCode.boundaryLimits.MAX_NUM_PATTERNANY_ENTITIES) {
        validationError(retCode.errorCode.BOUNDARY_PATTERNANYENTITY, `${luisJSON.patternAnyEntities.length} pattern.any entities found in application. At most ${retCode.boundaryLimits.MAX_NUM_PATTERNANY_ENTITIES} is allowed.`)
    }

    // utterances - 500 characters.
    luisJSON.utterances.forEach(utt => {
        if (utt.text.length > retCode.boundaryLimits.MAX_CHAR_IN_UTTERANCE) {
            validationError(retCode.errorCode.BOUNDARY_UTTERANCE_CHAR_LENGTH, `utterance '${utt.text}' under intent '${utt.intent}' has ${utt.text.length} characters. At most ${retCode.boundaryLimits.MAX_CHAR_IN_UTTERANCE} is allowed.`)
        }
    })

    // patterns - 500 patterns per application.
    if (luisJSON.patterns.length > retCode.boundaryLimits.MAX_NUM_PATTERNS) {
        validationError(retCode.errorCode.BOUNDARY_PATTERNS, `${luisJSON.patterns.length} patterns found in application. At most ${retCode.boundaryLimits.MAX_NUM_PATTERNS} is allowed.`)
    }

    // patterns - maximum length of a pattern is 400 characters.
    luisJSON.patterns.forEach(utt => {
        if (utt.pattern.length > retCode.boundaryLimits.MAX_CHAR_IN_PATTERNS) {
            validationError(retCode.errorCode.BOUNDARY_PATTERN_CHAR_LIMIT, `Pattern '${utt.pattern}' under intent '${utt.intent}' has ${utt.pattern.length} characters. At most ${retCode.boundaryLimits.MAX_CHAR_IN_PATTERNS} characters are allowed in any pattern.`)
        }
    })

    // regex entity - 20 entities.
    if (luisJSON.regex_entities.length > retCode.boundaryLimits.MAX_NUM_REGEX_ENTITIES) {
        validationError(retCode.errorCode.BOUNDARY_REGEX_ENTITY, `${luisJSON.regex_entities.length} regex entities found in application. At most ${retCode.boundaryLimits.MAX_NUM_REGEX_ENTITIES} is allowed.`)
    }

    // regex entity - 500 character max. per regular expression entity pattern
    luisJSON.regex_entities.forEach(utt => {
        if (utt.regexPattern.length > retCode.boundaryLimits.MAX_CHAR_REGEX_ENTITY_PATTERN) {
            validationError(retCode.errorCode.BOUNDARY_REGEX_CHAR_LIMIT, `Regex entity '${utt.name}' with pattern /${utt.regexPattern}/ has ${utt.regexPattern.length} characters. At most ${retCode.boundaryLimits.MAX_CHAR_REGEX_ENTITY_PATTERN} is allowed.`)
        }
    })

    // list entities: max 20,000 synonyms.
    luisJSON.closedLists.forEach(listEntity => {
        listEntity.subLists.forEach(subList => {
            if (subList.list.length > retCode.boundaryLimits.MAX_LIST_ENTITY_SYNONYMS) {
                validationError(retCode.errorCode.BOUNDARY_SYNONYMS_LENGTH, `'${listEntity.name}' list entity for parent (normalized value) '${subList.canonicalForm}' has ${subList.list.length} synonyms. At most ${retCode.boundaryLimits.MAX_LIST_ENTITY_SYNONYMS} is allowed.`)
            }
        })
    })

    let phraseLists = luisJSON.model_features || luisJSON.phraselists || [];
    // phrase list - 500 phrase lists.
    if (phraseLists.length > retCode.boundaryLimits.MAX_NUM_PHRASE_LISTS) {
        validationError(retCode.errorCode.BOUNDARY_PHRASE_LIST_LIMIT, `${phraseLists.length} phrase lists found in application. At most ${retCode.boundaryLimits.MAX_NUM_PHRASE_LISTS} is allowed.`)
    }

    // phrase list - maximum number of total phrases per application of 500,000 phrases.
    let totalPhrasesInApp = 0;
    phraseLists.forEach(item => totalPhrasesInApp += item.words.split(',').length);
    if (totalPhrasesInApp > retCode.boundaryLimits.MAX_NUM_PHRASES_IN_ALL_PHRASE_LIST) {
        validationError(retCode.errorCode.BOUNDARY_TOTAL_PHRASES, `${totalPhrasesInApp} phrases found across all phrase list definitions. At most ${retCode.boundaryLimits.MAX_NUM_PHRASES_IN_ALL_PHRASE_LIST} is allowed.`)
    }

    // phrase list - an interchangeable phrase list has a max of 50,000 phrases.
    phraseLists.filter(item => item.mode).forEach(item => {
        let phrasesInList = item.words.split(',').length;
        if (phrasesInList > retCode.boundaryLimits.MAX_INTERCHANGEABLE_PHRASES) {
            validationError(retCode.errorCode.BOUNDARY_INTC_PHRASES_LIMIT, `${phrasesInList} phrases found in interchangeable phrase list '${item.name}'. At most ${retCode.boundaryLimits.MAX_INTERCHANGEABLE_PHRASES} is allowed.`)
        }
    })

    // phrase list - a non-interchangeable phrase list has a max of 5,000 phrases.
    phraseLists.filter(item => !item.mode).forEach(item => {
        let phrasesInList = item.words.split(',').length;
        if (phrasesInList > retCode.boundaryLimits.MAX_NON_INTERCHANGEABLE_PHRASES) {
            validationError(retCode.errorCode.BOUNDARY_NINTC_PHRASES_LIMIT, `${phrasesInList} phrases found in non-interchangeable phrase list '${item.name}'. At most ${retCode.boundaryLimits.MAX_NON_INTERCHANGEABLE_PHRASES} is allowed.`)
        }
    })

    // roles - 10 roles per entity
    let totalRoles = 0;
    ["prebuiltEntities", "patternAnyEntities", "regex_entities", "closedLists", "composites", "entities"].forEach(scope => {
        luisJSON[scope].forEach(item => {
            if (item.roles.length > retCode.boundaryLimits.MAX_ROLES_PER_ENTITY) {
                validationError(retCode.errorCode.BOUNDARY_ROLES_PER_ENTITY, `${scope.substring(0, scope.length - 1)} ${item.name} has ${item.roles.length} roles. At most ${retCode.boundaryLimits.MAX_ROLES_PER_ENTITY} is allowed.`)
            }
            totalRoles += item.roles.length;
        })
    })

    // roles - 300 roles per application
    if (totalRoles > retCode.boundaryLimits.MAX_NUM_ROLES) {
        validationError(retCode.errorCode.BOUNDARY_TOTAL_ROLES, `${totalRoles} role definitions found across all entity types. At most ${retCode.boundaryLimits.MAX_NUM_ROLES} is allowed.`)
    }

    // features - at most 10 models can be used as a descriptor (feature) for a specific model.
    ["intents", "entities"].forEach(scope => {
        luisJSON[scope].forEach(item => {
            if (item.features && item.features.length > retCode.boundaryLimits.MAX_NUM_DESCRIPTORS_PER_MODEL) {
                validationError(retCode.errorCode.BOUNDARY_FEATURE_PER_MODEL, `${scope.substring(0, scope.length - 1)} ${item.name} has ${item.features.length} descriptors (features). At most ${retCode.boundaryLimits.MAX_NUM_DESCRIPTORS_PER_MODEL} is allowed.`)
            }
        })
    })

    // ml entities + roles - a limit of either 100 parent entities or 330 entities, whichever the user hits first.
    // A role counts as an entity for the purpose of this boundary. For example, a composite containing a simple
    // entity that has 2 roles counts as: 1 composite + 1 simple + 2 roles = 4 of the 330 entities.
    let numberOfParentEntities = 0;
    luisJSON.entities.forEach(item => {
        if (item.children && item.children.length > 0) numberOfParentEntities += 1;
    })

    let totalNumberOfEntitiesAndRoles = 0;
    ["prebuiltEntities", "patternAnyEntities", "regex_entities", "closedLists", "composites", "entities"].forEach(item => {
        totalNumberOfEntitiesAndRoles += luisJSON[item].length;
    })
    totalNumberOfEntitiesAndRoles += totalRoles;

    if (numberOfParentEntities > retCode.boundaryLimits.MAX_NUM_PARENT_ENTITIES) {
        validationError(retCode.errorCode.BOUNDARY_PARENT_ENTITY_LIMIT, `${numberOfParentEntities} parent ml entities found in application. At most ${retCode.boundaryLimits.MAX_NUM_PARENT_ENTITIES} is allowed.`)
    }

    if (totalNumberOfEntitiesAndRoles > retCode.boundaryLimits.MAX_TOTAL_ENTITES_AND_ROLES) {
        validationError(retCode.errorCode.BOUNDARY_TOTAL_ENTITIES_AND_ROLES, `${totalNumberOfEntitiesAndRoles} combined roles and entity definitions found. At most ${retCode.boundaryLimits.MAX_TOTAL_ENTITES_AND_ROLES} is allowed.`)
    }

    // up to 50 list entities
    if (luisJSON.closedLists.length > retCode.boundaryLimits.MAX_NUM_CLOSED_LISTS) {
        validationError(retCode.errorCode.BOUNDARY_TOTAL_CLOSED_LISTS, `${luisJSON.closedLists.length} list entity definitions found. At most ${retCode.boundaryLimits.MAX_NUM_CLOSED_LISTS} is allowed.`)
    }
}

const validationError = function (code, errorMsg) {
    let error = BuildDiagnostic({ message: errorMsg });
    throw (new exception(code, error.toString(), [error]));
}

const addEntities = function(luisJSON, entitiesList){
    luisJSON.entities.forEach(function (entity) {
        entitiesList.push(new validateLUISBlobEntity(entity.name, ['simple'], entity.roles));
    });
}

const addClosedLists = function(luisJSON, entitiesList, entityFound){
    luisJSON.closedLists.forEach(function (entity) {
        entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
        if (entityFound.length === 0) {
            entitiesList.push(new validateLUISBlobEntity(entity.name, ['list'], entity.roles));
        } else {
            entityFound[0].type.push('list');
        }
    });
}

const addPatternAnyEntities = function(luisJSON, entitiesList, entityFound){
    luisJSON.patternAnyEntities.forEach(function (entity) {
        entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
        if (entityFound.length === 0) {
            entitiesList.push(new validateLUISBlobEntity(entity.name, ['patternAny'], entity.roles));
        } else {
            entityFound[0].type.push('patternAny');
        }
    });
}

const addregexEntities = function(luisJSON, entitiesList, entityFound){
    luisJSON.regex_entities.forEach(function (entity) {
        entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
        if (entityFound.length === 0) {
            entitiesList.push(new validateLUISBlobEntity(entity.name, [`regEx:/${entity.regexPattern}/`], entity.roles));
        } else {
            if (entityFound[0].regexPattern !== undefined) {
                if (entityFound[0].regexPattern !== entity.regexPattern)
                    entityFound[0].type.push(`regEx:/${entity.regexPattern}/`);
            } else {
                entityFound[0].type.push(`regEx:/${entity.regexPattern}/`);
            }
        }
    });
}

const addCompositeEntities = function(luisJSON, entitiesList, entityFound){
    (luisJSON.composites || []).forEach(entity => {
        entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
        if (entityFound.length === 0) {
            entitiesList.push(new validateLUISBlobEntity(entity.name, ['composite'], entity.roles));
        } else {
            entityFound[0].type.push('composite');
        }
    })
}

const addPrebuiltEntities = function(luisJSON, entitiesList, entityFound){
    (luisJSON.prebuiltEntities || []).forEach(entity => {
        entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
        if (entityFound.length === 0) {
            entitiesList.push(new validateLUISBlobEntity(entity.name, ['prebuilt'], entity.roles));
        } else {
            entityFound[0].type.push('prebuilt');
        }
    })
}

const validateEntities = function(entitiesList){
    entitiesList.forEach(function (entity) {
        if (entity.type.length > 1) {
            let errorMsg = `Entity ${entity.name} has duplicate definitions.\r\n\t` + JSON.stringify(entity.type, null, 2);
            let error = BuildDiagnostic({ message: errorMsg });

            throw (new exception(retCode.errorCode.DUPLICATE_ENTITIES, error.toString(), [error]));
        }
    });
}

const validateUtterances = function(luisJSON, entitiesList){
    if (luisJSON.utterances.length <= 0) {
        return
    }
    luisJSON.utterances.forEach(function (utterance) {
        (utterance.entities || []).forEach(function (entity) {
            let entityInList = helpers.filterMatch(entitiesList, 'name', entity.entity);
            if (entityInList.length === 0) {
                return
            }
            if (entityInList[0].type.includes('phraseList')) {
                let errorMsg = `Utterance "${utterance.text}" has a reference to PhraseList. \r\n\tYou cannot have utterances with phraselist references in them`;
                let error = BuildDiagnostic({ message: errorMsg });

                throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString(), [error]));
            }
        })
    })
}

const validateComposites = function(luisJSON, entitiesList){
    if (luisJSON.composites.length <= 0) {
        return
    }

    luisJSON.composites.forEach(composite => {
        // composites cannot include pattern.any entities as children
        let patternAnyEntityInComposite = (luisJSON.patternAnyEntities || []).find(patternAnyEntity => {
            return composite.children.includes(patternAnyEntity.name);
        });
        if (patternAnyEntityInComposite !== undefined) {
            let errorMsg = `Composite entity "${composite.name}" includes pattern.any entity "${patternAnyEntityInComposite.name}".\r\n\tComposites cannot include pattern.any entity as a child.`;
            let error = BuildDiagnostic({ message: errorMsg });

            throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString(), [error]));
        }
        // composite entity definitions must have valid child entity type definitions.
        validateCompositeChildren(composite, entitiesList)
    })
}

const validateCompositeChildren = function(composite, entitiesList){
    composite.children.forEach(child => {
        if (child instanceof Object) child = child.name;
        // Fix for #1165
        // Current implementation does not account for an explicit role included in a child
        let childEntityName = child;
        let childEntityRole = '';
        if (child.includes(':')) {
            let childSplit = child.split(':').map(item => item.trim());
            childEntityName = childSplit[0];
            childEntityRole = childSplit[1];
        }
        let compositeChildEntityFound = (entitiesList || []).find(entity => entity.name == childEntityName);
        if (compositeChildEntityFound === undefined) {
            let errorMsg = `Composite entity "${composite.name}" includes an undefined child entity "${childEntityName}".\r\n\tAll children of composite entities must be explicitly defined or implicitly defined via an utterance or a pattern`;
            let error = BuildDiagnostic({ message: errorMsg });

            throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString(), [error]));
        }
        if (childEntityRole != '' &&
            !compositeChildEntityFound.roles.includes(childEntityRole)) {
            let errorMsg = `Composite entity "${composite.name}" includes an undefined child entity role "${childEntityName}:${childEntityRole}".\r\n\tAll children of composite entities must be explicitly defined or implicitly defined via an utterance or a pattern`;
            let error = BuildDiagnostic({ message: errorMsg });

            throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString(), [error]));
        }
    })
}

const validateFeatureAssignments = function(luisJSON) {
    const verifyList = [];
    const featureCandidates = [];
    const verifiedList = [];
    addFeatureCandidates(luisJSON.prebuiltEntities, featureCandidates);
    addFeatureCandidates(luisJSON.closedLists, featureCandidates);
    addFeatureCandidates(luisJSON.regex_entities, featureCandidates);
    addFeatureCandidates(luisJSON.phraselists, featureCandidates);
    verifyAndExtractFeatures(luisJSON.intents, verifyList, featureCandidates, verifiedList);
    verifyAndExtractFeatures(luisJSON.entities, verifyList, featureCandidates, verifiedList);
    verifyAndExtractFeatures(luisJSON.composites, verifyList, featureCandidates, verifiedList);
    if (verifyList.length !== 0) {
        verifyList.forEach(item => {
            if (!featureCandidates.includes(item)) {
                let errorMsg = `Feature "${item}" does not have a definition. All features must be defined.`;
                let error = BuildDiagnostic({ message: errorMsg });

                throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString(), [error]));
            }
        })
    }
}

const addFeatureCandidates = function(collection, featureCandidates) {
    (collection || []).forEach(item => {
        if (!featureCandidates.includes(item.name)) {
            featureCandidates.push(item.name);
        }
    })
}

const verifyAndExtractFeatures = function(collection = [], verifyList = [], featureCandidates = [], verifiedList = []) {
    (collection || []).forEach(item => {
        if (!featureCandidates.includes(item.name)) {
            featureCandidates.push(item.name);
        }
        (item.features || []).forEach(feature => {
            let featureName = feature.modelName || feature.featureName || undefined;
            if (featureName !== undefined &&
                !verifiedList.includes(featureName) &&
                !featureCandidates.includes(featureName) &&
                !verifyList.includes(featureName)) {
                verifyList.push(featureName);
            }
        })
        if (item.children && Array.isArray(item.children) && item.children.length !== 0) verifyAndExtractFeatures(item.children, verifyList, featureCandidates, verifiedList);
    })
}

class validateLUISBlobEntity{
    constructor(name, type, roles) {
        this.name = name ? name : '';
        this.type = type ? type : [];
        this.roles = roles ? roles : [];
    }
}

module.exports = validateLUIS
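
An illustrative failure case (not in the commit): defining the same name as both a simple and a list entity trips the duplicate-definition check; the minimal blob shape below is an assumption based on the properties the validator reads:

const validateLUIS = require('./luisValidator')

try {
    validateLUIS({
        entities: [{ name: 'color', roles: [] }],
        closedLists: [{ name: 'color', roles: [], subLists: [] }],
        patternAnyEntities: [], regex_entities: [], composites: [],
        prebuiltEntities: [], utterances: [], patterns: [], intents: []
    });
} catch (e) {
    // e.text reports: Entity color has duplicate definitions.
}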
@ -0,0 +1,19 @@
module.exports = {
    jsonPropertyName: function(property) {
        if (typeof property === 'object') {
            property = property.name
        }
        property += ''
        let name = property.split(':').slice(-1)[0];
        if (!name.startsWith('geographyV2') &&
            !name.startsWith('ordinalV2') &&
            name.endsWith('V2')) {
            name = name.substring(0, name.length - 2);
        }
        return this.normalizeName(name);
    },
    normalizeName: function(name) {
        return name.replace(/\./g, '_').replace(/ /g, '_');
    }
}
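
A few worked examples of these helpers (inputs are illustrative):

const propertyHelper = require('./propertyHelper')

propertyHelper.jsonPropertyName('datetimeV2');    // 'datetime'     (V2 suffix trimmed)
propertyHelper.jsonPropertyName('geographyV2');   // 'geographyV2'  (exempt from trimming)
propertyHelper.jsonPropertyName('city:fromCity'); // 'fromCity'     (role after the last colon wins)
propertyHelper.normalizeName('my entity.name');   // 'my_entity_name'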
@ -0,0 +1,24 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const qnaAlterationsToLuContent = require('./qnaConverter')

class Alterations {
    constructor(alterations = null) {
        if (alterations) {
            for (let prop in alterations) {
                this[prop] = alterations[prop];
            }
        } else {
            this.wordAlterations = [];
        }
    }

    parseToLuContent() {
        return qnaAlterationsToLuContent(this)
    }
}

module.exports = Alterations
@ -0,0 +1,24 @@
const NEWLINE = require('os').EOL;

/**
 * Parses an Alterations object into QnA content
 * @param {Alterations} alterationsJSON
 * @returns {string} QnA content
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const qnaAlterationsToLuContent = function(alterationsJSON){
    let fileContent = '> # QnA Alterations' + NEWLINE + NEWLINE;
    if(alterationsJSON.wordAlterations && alterationsJSON.wordAlterations.length > 0) {
        alterationsJSON.wordAlterations.forEach(function(alteration) {
            fileContent += '$' + alteration.alterations[0] + ' : ' + 'qna-alterations = ' + NEWLINE;
            // note: splice mutates the incoming alteration list; the first entry becomes the list name.
            alteration.alterations.splice(0, 1);
            alteration.alterations.forEach(function(item) {
                fileContent += '- ' + item + NEWLINE;
            })
            fileContent += NEWLINE;
        });
    }
    return fileContent
}

module.exports = qnaAlterationsToLuContent
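
A small worked example (inputs are illustrative) of the emitted alteration content:

const Alterations = require('./alterations')

const alt = new Alterations({
    wordAlterations: [{ alterations: ['botframework', 'bot framework', 'bf'] }]
});
alt.parseToLuContent();
// > # QnA Alterations
//
// $botframework : qna-alterations =
// - bot framework
// - bf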
@ -0,0 +1,27 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const qnaConverter = require('./qnaConverter')

class KB {
    constructor(qnaJSON = null) {
        if (qnaJSON) {
            for (let prop in qnaJSON) {
                this[prop] = qnaJSON[prop];
            }
        } else {
            this.urls = [];
            this.qnaList = [];
            this.files = [];
            this.name = '';
        }
    }

    parseToLuContent() {
        return qnaConverter(this)
    }
}

module.exports = KB
@ -0,0 +1,199 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const KB = require('./kb')
const deepEqual = require('deep-equal')
const exception = require('../../utils/exception')
const retCode = require('../../utils/enums/CLI-errors').errorCode
const mergeLuFiles = require('./../../lu/luMerger').Build
const Alterations = require('./../alterations/alterations')
const QnAMaker = require('./qnamaker')

/**
 * Builds a QnAMaker instance from a QnA list.
 * @param {Array<Qna>} qnaObjArray Array of QnA files to be merged
 * @param {boolean} verbose indicates if we need verbose logging.
 * @param {function} qnaSearchFn function to retrieve the lu files found in the references
 * @returns {QnAMaker} new QnAMaker instance
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const build = async function(qnaObjArray, verbose, qnaSearchFn) {
    let mergedContent = await mergeLuFiles(qnaObjArray, verbose, '', qnaSearchFn)
    let parsedQnAList = mergedContent.QnAContent.filter(item => item.includeInCollate)

    let qnaList = []
    parsedQnAList.forEach(index => {
        qnaList.push(index.qnaJsonStructure)
    })
    let kbResult = collate(qnaList)

    let allParsedQnAAlterations = mergedContent.QnAAlterations.filter(item => item.includeInCollate)
    let finalQnAAlterationsList = new Alterations()
    allParsedQnAAlterations.forEach(function (alterationList) {
        alterationList = alterationList.qnaAlterations;
        if (alterationList.wordAlterations) {
            alterationList.wordAlterations.forEach(function (alteration) {
                finalQnAAlterationsList.wordAlterations.push(alteration);
            })
        }
    })
    return new QnAMaker(kbResult, finalQnAAlterationsList)
}

const collate = function(qnaList) {
    let result = new KB()
    for (let i = 0; i < qnaList.length; i++) {
        let blob = qnaList[i]
        // does this blob have URLs?
        collateUrls(result, blob)
        // does this blob have files?
        collateFiles(result, blob)
        // does this blob have qna pairs?
        collateQnAPairs(result, blob)

        result.name = blob.name ? blob.name : result.name
    }
    resolveMultiTurnReferences(result.qnaList)
    resolveQnAIds(result.qnaList)
    return result
}

module.exports = {
    collate,
    build
}

const resolveMultiTurnReferences = function(qnaList) {
    let qnaPairsWithMultiTurn = qnaList.filter(item => item.context.prompts.length !== 0);
    // find the largest auto-id
    let largestAutoIdxList = qnaList.filter(item => item.id !== 0 && item.id.toString().startsWith('*auto*'));
    let largestAutoIdx = 0;
    if (largestAutoIdxList.length !== 0) {
        let idx = largestAutoIdxList.reduce(function(max, obj) {
            return parseInt(obj.id.replace('*auto*', '')) > parseInt(max.id.replace('*auto*', '')) ? obj : max;
        });
        largestAutoIdx = parseInt(idx.id.replace('*auto*', '')) + 1;
    }

    (qnaPairsWithMultiTurn || []).forEach(item => {
        // find the matching QnA id for each follow-up prompt
        (item.context.prompts || []).forEach(prompt => {
            // find by ID first
            let qnaId = qnaList.find(x => x.id === prompt.qnaId || x.id === parseInt(prompt.qnaId));
            if (!qnaId) {
                // find by question match
                qnaId = qnaList.find(x => x.source.trim() !== 'crosstrained' && (x.questions.includes(prompt.qnaId) || x.questions.includes(prompt.qnaId.replace(/-/g, ' ').trim())))
            }
            if (qnaId === undefined) {
                throw (new exception(retCode.INVALID_INPUT, `[ERROR]: Cannot find follow up prompt definition for '- [${prompt.displayText}](#?${prompt.qnaId})'.`));
            } else {
                if (qnaId.id === 0) {
                    qnaId.id = `*auto*${largestAutoIdx++}`;
                }
                prompt.qnaId = qnaId.id;
                prompt.qna = null;
                qnaId.context.isContextOnly = !qnaId.context.isContextOnly ? prompt.contextOnly : true;
                delete prompt.contextOnly;
            }
        })
    })
}

const resolveQnAIds = function(qnaList) {
    let qnaIdsAssigned = [];
    let baseQnaId = 1;
    // find all explicitly assigned IDs
    let qnasWithId = qnaList.filter(pair => (pair.id !== 0) && (!pair.id.toString().startsWith('*auto*')));
    qnasWithId.forEach(qna => {
        let qnaId = 0;
        // this is the only enforcement for IDs being numbers.
        if (isNaN(qna.id)) throw (new exception(retCode.INVALID_INPUT, `[Error]: Explicitly assigned QnA Ids must be numbers. '${qna.id}' is not a number.`));
        qnaId = parseInt(qna.id);
        if(!qnaIdsAssigned.includes(qnaId)) qnaIdsAssigned.push(qnaId)
    });

    // finalize IDs for everything that was auto id'd
    let qnasWithAutoId = qnaList.filter(pair => (pair.id !== 0) && isNaN(pair.id) && (pair.id.toString().startsWith('*auto*')));
    qnasWithAutoId.forEach(qna => {
        // get a new ID
        let newIdToAssign = getNewId(qnaIdsAssigned, baseQnaId++);
        // find all child references to this id and update them.
        qnaList.forEach(pair => {
            if (pair.context.prompts.length === 0) return;
            pair.context.prompts.forEach(prompt => {
                if (prompt.qnaId === qna.id) {
                    prompt.qnaId = newIdToAssign;
                }
            })
        });
        qna.id = newIdToAssign;
    })

    // finalize IDs for everything else.
    let qnasWithoutId = qnaList.filter(pair => pair.id === 0);
    qnasWithoutId.forEach(qna => {
        if (qnasWithId.length !== 0 || qnasWithAutoId.length !== 0) {
            qna.id = getNewId(qnaIdsAssigned, baseQnaId++);
        } else {
            // remove context for back compat.
            delete qna.context;
        }
    })
}

const getNewId = function(currentList, curId) {
    // e.g. getNewId([1, 2, 4], 1) advances past 1 and 2, returns 3, and records it in the list.
    while (currentList.includes(curId)) curId++;
    currentList.push(curId)
    return curId;
}

const collateFiles = function(instance, qnaObject) {
    if (qnaObject.files.length === 0) {
        return
    }
    // add this file if it does not already exist in the final json
    qnaObject.files.forEach(function (qnaFile) {
        if (instance.files.filter(item => { return item.fileUri == qnaFile.fileUri }).length === 0) {
            instance.files.push(qnaFile);
        }
    });
}

const collateQnAPairs = function(instance, qnaObject) {
    if (qnaObject.qnaList.length === 0) {
        return
    }
    // walk through each qna pair and add it if it does not exist
    qnaObject.qnaList.forEach(function (newQnAItem) {
        if (instance.qnaList.length == 0) {
            instance.qnaList.push(newQnAItem);
        } else {
            let qnaExists = false;
            let fIndex = 0;
            for (fIndex in instance.qnaList) {
                if (deepEqual(instance.qnaList[fIndex], newQnAItem)) {
|
||||
qnaExists = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!qnaExists) instance.qnaList.push(newQnAItem);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const collateUrls = function(instance, qnaObject) {
|
||||
if (qnaObject.urls.length == 0) {
|
||||
return
|
||||
}
|
||||
// add this url if this does not already exist in finaljson
|
||||
qnaObject.urls.forEach(function (qnaUrl) {
|
||||
if (!instance.urls.includes(qnaUrl)) {
|
||||
instance.urls.push(qnaUrl);
|
||||
}
|
||||
});
|
||||
|
||||
}
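For orientation, a minimal sketch of driving `collate` directly. This is illustrative only: it assumes `new KB()` initializes empty `qnaList`/`urls`/`files` (which the de-dup helpers above rely on), and the blob shapes are hand-written stand-ins for what the parser emits.

const {collate} = require('./kbCollate')

// Two parsed blobs that share a URL; collate keeps one copy,
// and the last non-empty name wins.
const merged = collate([
    {name: 'faq', urls: ['https://example.com/faq'], files: [], qnaList: []},
    {name: '', urls: ['https://example.com/faq'], files: [], qnaList: []}
])
// merged.urls => ['https://example.com/faq'], merged.name => 'faq'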
@@ -0,0 +1,18 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
class qnaContext {
    /**
     * @property {Boolean} isContextOnly
     */
    /**
     * @property {qnaPrompt []} prompts
     */
    constructor(isContextOnly, prompts) {
        this.isContextOnly = isContextOnly ? isContextOnly : false;
        this.prompts = prompts ? prompts : [];
    }
}

module.exports = qnaContext;
@@ -0,0 +1,66 @@
const NEWLINE = require('os').EOL;
const exception = require('./../../utils/exception')
const retCode = require('./../../utils/enums/CLI-errors')
const QNA_GENERIC_SOURCE = "custom editorial";

/**
 * Parses a QnAMaker object into QnA content
 * @param {QnAMaker} qnaJSON input QnAMaker JSON
 * @returns {string} QnA content
 * @throws {exception} Throws on errors. exception object includes errCode and text.
 */
const qnaToLuContent = function(qnaJSON) {
    let fileContent = '> # QnA pairs' + NEWLINE + NEWLINE;
    let root = null;
    if (qnaJSON.qnaDocuments) {
        root = qnaJSON.qnaDocuments;
    } else {
        root = qnaJSON.qnaList;
    }

    if (!root) {
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, 'No input QnA content found'));
    }

    if (root.length <= 0) {
        return fileContent
    }

    root.forEach(function(qnaItem) {
        fileContent += '> !# @qna.pair.source = ' + qnaItem.source + NEWLINE + NEWLINE;
        fileContent += qnaItem.id.toString() !== "0" ? `<a id = "${qnaItem.id}"></a>` + NEWLINE + NEWLINE : '';
        fileContent += '## ? ' + qnaItem.questions[0] + NEWLINE;
        qnaItem.questions.splice(0, 1);
        qnaItem.questions.forEach(function(question) {
            fileContent += '- ' + question + NEWLINE;
        })
        fileContent += NEWLINE;
        if (qnaItem.metadata && qnaItem.metadata.length > 0) {
            fileContent += '**Filters:**' + NEWLINE;
            qnaItem.metadata.forEach(function(filter) {
                fileContent += '- ' + filter.name + ' = ' + filter.value + NEWLINE;
            });
            fileContent += NEWLINE;
        }
        fileContent += '```markdown' + NEWLINE;
        fileContent += qnaItem.answer + NEWLINE;
        fileContent += '```' + NEWLINE;
        if (qnaItem.context && qnaItem.context.prompts && qnaItem.context.prompts.length !== 0) {
            fileContent += NEWLINE + '**Prompts:**' + NEWLINE;
            qnaItem.context.prompts.forEach(function(prompt) {
                fileContent += `- [${prompt.displayText}](#${prompt.qnaId})`;
                // See if the linked prompt is context only and if so, add the decoration.
                let promptQnA = root.find(item => item.id == prompt.qnaId);
                if (promptQnA) {
                    fileContent += promptQnA.context.isContextOnly === true ? ` \`context-only\`` : '';
                }
                fileContent += NEWLINE;
            })
        }
        fileContent += NEWLINE;
    });

    return fileContent
}

module.exports = qnaToLuContent
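As a sketch of the output shape (the require path is a guess, since the page does not show this module's file name, and exact blank-line placement follows os.EOL):

const qnaToLuContent = require('./qnaConverter')

const content = qnaToLuContent({
    qnaList: [{
        id: 1,
        answer: 'Reset it from the sign-in page.',
        source: 'custom editorial',
        questions: ['How do I reset my password?', 'forgot password'],
        metadata: [{name: 'topic', value: 'account'}]
    }]
})
// content now holds, in order: the '> # QnA pairs' banner, the
// '> !# @qna.pair.source = custom editorial' line, '<a id = "1"></a>',
// '## ? How do I reset my password?', '- forgot password',
// '**Filters:**' with '- topic = account', and the answer fenced as ```markdown.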
@@ -0,0 +1,19 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
class qnaFile {
    /**
     * @property {string} fileUri
     */
    /**
     * @property {string} fileName
     */

    constructor(fileUri, fileName) {
        this.fileName = fileName ? fileName : '';
        this.fileUri = fileUri ? fileUri : '';
    }
}

module.exports = qnaFile;
@@ -0,0 +1,37 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const qnaContext = require('./qnaContext');

class qnaList {
    /**
     * @property {string} id
     */
    /**
     * @property {string} answer
     */
    /**
     * @property {string} source
     */
    /**
     * @property {string []} questions
     */
    /**
     * @property {qnaMetaData []} metadata
     */
    /**
     * @property {qnaContext} context
     */
    constructor(id, answer, source, questions, metadata, context) {
        this.id = id ? id : 0;
        this.answer = answer ? answer : '';
        this.source = source ? source : 'custom editorial';
        this.questions = questions ? questions : [];
        this.metadata = metadata ? metadata : [];
        this.context = context ? context : new qnaContext();
    }
}

module.exports = qnaList;
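A construction sketch using these model classes (the require paths assume the file names match the class names shown in this commit):

const QnaList = require('./qnaList')
const QnaMetaData = require('./qnaMetaData')

// id 0 means "not yet assigned"; kbCollate's resolveQnAIds hands out real ids later.
const pair = new QnaList(0, '23:59 UTC on the last day of the month', 'custom editorial',
    ['When is the deadline?', 'deadline'], [new QnaMetaData('topic', 'dates')])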
@@ -0,0 +1,76 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const Alterations = require('./../alterations/alterations')
const QnAMaker = require('./qnamaker')
const KB = require('./kb')
const build = require('./kbCollate').build
const parseFileContents = require('./../../lufile/parseFileContents').parseFile

class QnABuilder {
    /**
     * Builds a QnAMaker instance from a KB json.
     * @param {JSON} kbJson QnAMaker json
     * @returns {QnAMaker} new QnAMaker instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromKB(kbJson) {
        return new QnAMaker(new KB(kbJson))
    }

    /**
     * Builds a QnAMaker instance from an alterations json.
     * @param {JSON} alterationsJson QnAMaker alterations json
     * @returns {QnAMaker} new QnAMaker instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromAlterations(alterationsJson) {
        return new QnAMaker('', new Alterations(alterationsJson))
    }

    /**
     * Builds a QnAMaker instance from a KB and alterations json.
     * @param {JSON} kbJson KB json
     * @param {JSON} alterationsJson alterations json
     * @returns {QnAMaker} new QnAMaker instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromKbAndAlterations(kbJson, alterationsJson) {
        return new QnAMaker(new KB(kbJson), new Alterations(alterationsJson))
    }

    /**
     * Builds a QnAMaker instance from qna content.
     * @param {Qna} qnaContent QnA content
     * @returns {QnAMaker} new QnAMaker instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromContent(qnaContent) {
        let parsedContent = await parseFileContents(qnaContent, false)
        return new QnAMaker(new KB(parsedContent.qnaJsonStructure), new Alterations(parsedContent.qnaAlterations))
    }

    /**
     * Builds a QnAMaker instance from a qna list.
     * @param {Array<QnA>} qnaObjArray Array of QnA files to be merged
     * @param {function} qnaSearchFn function to retrieve the qna files found in the references
     * @returns {QnAMaker} new QnAMaker instance
     * @throws {exception} Throws on errors. exception object includes errCode and text.
     */
    static async fromQna(qnaObjArray, qnaSearchFn) {
        if (!Array.isArray(qnaObjArray)) {
            return new QnAMaker()
        }

        if (qnaObjArray.length === 1) {
            let parsedContent = await parseFileContents(qnaObjArray[0].content, false)
            return new QnAMaker(new KB(parsedContent.qnaJsonStructure), new Alterations(parsedContent.qnaAlterations))
        }

        return build(qnaObjArray, false, qnaSearchFn)
    }
}

module.exports = QnABuilder
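The typical entry point is `fromContent` on raw qna-format text. A sketch; the sample content string is illustrative, leaning on the grammar added in this commit rather than a verified snippet:

const QnABuilder = require('./qnaMakerBuilder')

QnABuilder.fromContent('# ? greeting\n- hello there\n\n```markdown\nHi!\n```')
    .then(qnaMaker => process.stdout.write(qnaMaker.parseToQnAContent()))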
@@ -0,0 +1,18 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
class qnaMetaData {
    /**
     * @property {string} name
     */
    /**
     * @property {string} value
     */
    constructor(name, value) {
        this.name = name ? name : '';
        this.value = value ? value : '';
    }
}

module.exports = qnaMetaData;
@@ -0,0 +1,30 @@
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
class qnaPrompt {
    /**
     * @property {Number} displayOrder
     */
    /**
     * @property {String} qnaId
     */
    /**
     * @property {String} displayText
     */
    /**
     * @property {Boolean} contextOnly
     */
    /**
     * @property {qnaList} qna
     */
    constructor(displayText, qnaId, qna, contextOnly = false, displayOrder = 0) {
        this.displayText = displayText ? displayText : undefined;
        this.qnaId = qnaId ? qnaId : undefined;
        this.qna = qna ? qna : undefined;
        this.contextOnly = contextOnly ? contextOnly : false;
        this.displayOrder = displayOrder ? displayOrder : 0;
    }
}

module.exports = qnaPrompt;
@@ -0,0 +1,27 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const KB = require('./kb')
const Alterations = require('./../alterations/alterations')
const QnA = require('./../../lu/qna')

class QnAMaker {
    constructor(kb = null, alterations = null) {
        this.kb = kb instanceof KB ? kb : null
        this.alterations = alterations instanceof Alterations ? alterations : null
    }

    parseToQnAContent() {
        let content = this.kb ? this.kb.parseToLuContent() : ''
        content += this.alterations ? this.alterations.parseToLuContent() : ''
        return content
    }

    parseToQNA() {
        return new QnA(this.parseToQnAContent())
    }
}

module.exports = QnAMaker
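Composition sketch: wrap a service export and its alterations, then serialize back to qna-format text. `kbJson` and `alterationsJson` are placeholders for the QnA Maker export payloads:

const QnAMaker = require('./qnamaker')
const KB = require('./kb')
const Alterations = require('./../alterations/alterations')

// kbJson / alterationsJson: as returned by the QnA Maker export APIs
const instance = new QnAMaker(new KB(kbJson), new Alterations(alterationsJson))
const qnaText = instance.parseToQnAContent() // kb section followed by the alterations section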
@@ -0,0 +1,562 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

import {QnaBuildCore} from './core'
import {Settings} from './settings'
import {MultiLanguageRecognizer} from './multi-language-recognizer'
import {Recognizer} from './recognizer'
import {CrossTrainedRecognizer} from './cross-trained-recognizer'
const path = require('path')
const fs = require('fs-extra')
const delay = require('delay')
const fileHelper = require('./../../utils/filehelper')
const fileExtEnum = require('./../utils/helpers').FileExtTypeEnum
const retCode = require('./../utils/enums/CLI-errors')
const exception = require('./../utils/exception')
const qnaBuilderVerbose = require('./../qna/qnamaker/kbCollate')
const qnaMakerBuilder = require('./../qna/qnamaker/qnaMakerBuilder')
const qnaOptions = require('./../lu/qnaOptions')
const Content = require('./../lu/qna')
const KB = require('./../qna/qnamaker/kb')
const recognizerType = require('./../utils/enums/recognizertypes')
const LUOptions = require('./../lu/luOptions')

export class Builder {
  private readonly handler: (input: string) => any

  constructor(handler: any) {
    this.handler = handler
  }

  async loadContents(
    files: string[],
    botName: string,
    suffix: string,
    region: string,
    culture: string,
    schema?: string,
    importResolver?: object) {
    let multiRecognizers = new Map<string, MultiLanguageRecognizer>()
    let settings: any
    let recognizers = new Map<string, Recognizer>()
    let qnaContents = new Map<string, any>()
    let crosstrainedRecognizers = new Map<string, CrossTrainedRecognizer>()
    let qnaObjects = new Map<string, any[]>()

    for (const file of files) {
      let fileCulture: string
      let fileName: string
      let cultureFromPath = fileHelper.getCultureFromPath(file)
      if (cultureFromPath) {
        fileCulture = cultureFromPath
        let fileNameWithCulture = path.basename(file, path.extname(file))
        fileName = fileNameWithCulture.substring(0, fileNameWithCulture.length - cultureFromPath.length - 1)
      } else {
        fileCulture = culture
        fileName = path.basename(file, path.extname(file))
      }

      const fileFolder = path.dirname(file)
      const crossTrainedFileName = fileName + '.lu.qna.dialog'
      const crossTrainedRecognizerPath = path.join(fileFolder, crossTrainedFileName)
      if (!crosstrainedRecognizers.has(fileName)) {
        let crosstrainedRecognizerContent = []
        let crosstrainedRecognizerSchema = schema
        if (fs.existsSync(crossTrainedRecognizerPath)) {
          let crosstrainedRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(crossTrainedRecognizerPath))
          crosstrainedRecognizerContent = crosstrainedRecognizerObject.recognizers
          crosstrainedRecognizerSchema = crosstrainedRecognizerSchema || crosstrainedRecognizerObject.$schema
          this.handler(`${crossTrainedRecognizerPath} loaded\n`)
        }

        crosstrainedRecognizers.set(fileName, new CrossTrainedRecognizer(crossTrainedRecognizerPath, crosstrainedRecognizerContent, crosstrainedRecognizerSchema as string))
      }

      let qnaFiles = await fileHelper.getLuObjects(undefined, file, true, fileExtEnum.QnAFile)
      this.handler(`${file} loaded\n`)

      // filter empty qna files
      qnaFiles = qnaFiles.filter((file: any) => file.content !== '')
      if (qnaFiles.length <= 0) continue

      const multiRecognizerPath = path.join(fileFolder, `${fileName}.qna.dialog`)
      if (!multiRecognizers.has(fileName)) {
        let multiRecognizerContent = {}
        let multiRecognizerSchema = schema
        if (fs.existsSync(multiRecognizerPath)) {
          let multiRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(multiRecognizerPath))
          multiRecognizerContent = multiRecognizerObject.recognizers
          multiRecognizerSchema = multiRecognizerSchema || multiRecognizerObject.$schema
          this.handler(`${multiRecognizerPath} loaded\n`)
        }

        multiRecognizers.set(fileName, new MultiLanguageRecognizer(multiRecognizerPath, multiRecognizerContent, multiRecognizerSchema as string))
      }

      if (settings === undefined) {
        const settingsPath = path.join(fileFolder, `qnamaker.settings.${suffix}.${region}.json`)
        let settingsContent = {}
        if (fs.existsSync(settingsPath)) {
          settingsContent = JSON.parse(await fileHelper.getContentFromFile(settingsPath)).qna
          this.handler(`${settingsPath} loaded\n`)
        }

        settings = new Settings(settingsPath, settingsContent)
      }

      const dialogName = `${fileName}.${fileCulture}.qna`
      const dialogFile = path.join(fileFolder, dialogName + '.dialog')
      let existingDialogObj: any
      if (fs.existsSync(dialogFile)) {
        existingDialogObj = JSON.parse(await fileHelper.getContentFromFile(dialogFile))
        this.handler(`${dialogFile} loaded\n`)
      }

      if (existingDialogObj && schema) {
        existingDialogObj.$schema = schema
      }

      let recognizer = Recognizer.load(file, dialogName, dialogFile, settings, existingDialogObj, schema)
      recognizers.set(dialogName, recognizer)

      if (!qnaContents.has(fileCulture)) {
        let contentPerCulture = new Content('', new qnaOptions(botName, true, fileCulture, file))
        qnaContents.set(fileCulture, contentPerCulture)
        qnaObjects.set(fileCulture, qnaFiles)
      } else {
        // merge contents of qna files with the same culture
        let qnaObject = qnaObjects.get(fileCulture)
        if (qnaObject !== undefined) {
          qnaObject.push(...qnaFiles)
        }
      }
    }

    await this.resolveMergedQnAContentIds(qnaContents, qnaObjects, importResolver)

    return {qnaContents: [...qnaContents.values()], recognizers, multiRecognizers, settings, crosstrainedRecognizers}
  }

  async build(
    qnaContents: any[],
    recognizers: Map<string, Recognizer>,
    subscriptionkey: string,
    endpoint: string,
    botName: string,
    suffix: string,
    fallbackLocale: string,
    multiRecognizers?: Map<string, MultiLanguageRecognizer>,
    settings?: Settings,
    crosstrainedRecognizers?: Map<string, CrossTrainedRecognizer>,
    dialogType?: string) {
    // QnA Maker API TPS: the number of concurrent transactions the API allows per second
    let qnaApiTps = 3

    // delay between QnA Maker calls is set to 1100 milliseconds, since exactly 1000 can still hit the rate limit in corner cases
    let delayDuration = 1100

    // default returned recognizer values
    let recognizerValues: Recognizer[] = []

    let multiRecognizerValues: MultiLanguageRecognizer[] = []

    let settingsValue: any

    let crosstrainedRecognizerValues: CrossTrainedRecognizer[] = []

    // check whether all qna contents are empty
    let isAllQnAEmpty = fileHelper.isAllFilesSectionEmpty(qnaContents)

    if (!isAllQnAEmpty) {
      const qnaBuildCore = new QnaBuildCore(subscriptionkey, endpoint)
      const kbs = (await qnaBuildCore.getKBList()).knowledgebases

      // loop in batches to make full use of the qna tps capacity
      while (qnaContents.length > 0) {
        // take a batch of contents (batch size set by qnaApiTps) on each iteration
        const subQnaContents = qnaContents.splice(0, qnaApiTps)

        // handle the batch concurrently
        await Promise.all(subQnaContents.map(async content => {
          // init current kb object from qna content
          const qnaObj = await this.initQnaFromContent(content, botName, suffix)
          let currentKB = qnaObj.kb
          let currentAlt = qnaObj.alterations
          let culture = content.language as string

          let hostName = ''

          // get recognizer
          let recognizersOfContentCulture: Recognizer[] = []
          for (let [dialogFileName, recognizer] of recognizers) {
            const fileNameSplit = dialogFileName.split('.')
            if (fileNameSplit[fileNameSplit.length - 2] === culture) {
              // check if a kb with a matching name exists under the current authoring key
              if (!recognizer.getKBId()) {
                for (let kb of kbs) {
                  if (kb.name === currentKB.name) {
                    recognizer.setKBId(kb.id)
                    hostName = kb.hostName
                    break
                  }
                }
              }

              recognizersOfContentCulture.push(recognizer)
            }
          }

          let needPublish = false

          // update the model if a matching kb is found,
          // otherwise create a new kb
          let recognizerWithKBId = recognizersOfContentCulture.find((r: Recognizer) => r.getKBId() !== '')
          if (recognizerWithKBId !== undefined) {
            // see if the model needs updating
            needPublish = await this.updateKB(currentKB, qnaBuildCore, recognizerWithKBId, delayDuration)
          } else {
            // create a new kb
            needPublish = await this.createKB(currentKB, qnaBuildCore, recognizersOfContentCulture, delayDuration)
          }

          const publishRecognizer = recognizerWithKBId || recognizersOfContentCulture[0]

          if (needPublish) {
            // train and publish kb
            await this.publishKB(qnaBuildCore, publishRecognizer, currentKB.name, delayDuration)
          }

          if (hostName === '') hostName = (await qnaBuildCore.getKB(publishRecognizer.getKBId())).hostName

          hostName += '/qnamaker'

          // update alterations if there are any
          if (currentAlt.wordAlterations && currentAlt.wordAlterations.length > 0) {
            this.handler('Replacing alterations...\n')
            await qnaBuildCore.replaceAlt(currentAlt)
          }

          for (const recognizer of recognizersOfContentCulture) {
            // update multiLanguageRecognizer asset
            const dialogName = path.basename(recognizer.getDialogPath(), `.${culture}.qna.dialog`)
            const dialogFileName = path.basename(recognizer.getDialogPath(), '.dialog')
            if (multiRecognizers && multiRecognizers.has(dialogName)) {
              let multiRecognizer = multiRecognizers.get(dialogName) as MultiLanguageRecognizer
              multiRecognizer.recognizers[culture] = dialogFileName
              if (culture.toLowerCase() === fallbackLocale.toLowerCase()) {
                multiRecognizer.recognizers[''] = dialogFileName
              }
            }

            if (crosstrainedRecognizers && crosstrainedRecognizers.has(dialogName)) {
              let crosstrainedRecognizer = crosstrainedRecognizers.get(dialogName) as CrossTrainedRecognizer
              if (!crosstrainedRecognizer.recognizers.includes(dialogName + '.qna')) {
                crosstrainedRecognizer.recognizers.push(dialogName + '.qna')
              }
            }

            // update settings asset
            if (settings) {
              settings.qna[dialogFileName.split('.').join('_').replace(/-/g, '_')] = recognizer.getKBId()
              settings.qna.hostname = hostName
            }
          }
        }))
      }

      // write dialog assets
      if (recognizers) {
        recognizerValues = Array.from(recognizers.values())
      }

      if (multiRecognizers) {
        multiRecognizerValues = Array.from(multiRecognizers.values())
      }

      if (settings) {
        settingsValue = settings as Settings
      }
    }

    if (dialogType === recognizerType.CROSSTRAINED && crosstrainedRecognizers) {
      crosstrainedRecognizerValues = Array.from(crosstrainedRecognizers.values())
    }

    const dialogContents = this.generateDeclarativeAssets(recognizerValues, multiRecognizerValues, settingsValue, crosstrainedRecognizerValues)

    return dialogContents
  }

  async getEndpointKeys(subscriptionkey: string, endpoint: string) {
    const qnaBuildCore = new QnaBuildCore(subscriptionkey, endpoint)
    const endPointKeys = await qnaBuildCore.getEndpointKeys()

    return endPointKeys
  }

  async importUrlReference(
    url: string,
    subscriptionkey: string,
    endpoint: string,
    kbName: string) {
    const qnaBuildCore = new QnaBuildCore(subscriptionkey, endpoint)
    const kbs = (await qnaBuildCore.getKBList()).knowledgebases

    let kbId = ''
    // check if a kb with a matching name exists under the current authoring key
    for (let kb of kbs) {
      if (kb.name === kbName) {
        kbId = kb.id
        break
      }
    }

    // delete the kb if it already exists
    if (kbId !== '') {
      await qnaBuildCore.deleteKB(kbId)
    }

    // create a new kb
    kbId = await this.createUrlKB(qnaBuildCore, url, kbName)

    const kbJson = await qnaBuildCore.exportKB(kbId, 'Test')
    const kb = new KB(kbJson)
    const kbToLuContent = kb.parseToLuContent()
    await qnaBuildCore.deleteKB(kbId)

    return kbToLuContent
  }

  async importFileReference(
    fileName: string,
    fileUri: string,
    subscriptionkey: string,
    endpoint: string,
    kbName: string) {
    const qnaBuildCore = new QnaBuildCore(subscriptionkey, endpoint)
    const kbs = (await qnaBuildCore.getKBList()).knowledgebases

    let kbId = ''
    // check if a kb with a matching name exists under the current authoring key
    for (let kb of kbs) {
      if (kb.name === kbName) {
        kbId = kb.id
        break
      }
    }

    // delete the kb if it already exists
    if (kbId !== '') {
      await qnaBuildCore.deleteKB(kbId)
    }

    // create a new kb
    kbId = await this.createFileKB(qnaBuildCore, fileName, fileUri, kbName)

    const kbJson = await qnaBuildCore.exportKB(kbId, 'Test')
    const kb = new KB(kbJson)
    const kbToLuContent = kb.parseToLuContent()
    await qnaBuildCore.deleteKB(kbId)

    return kbToLuContent
  }

  async writeDialogAssets(contents: any[], force: boolean, out: string) {
    let writeDone = false

    for (const content of contents) {
      let outFilePath
      if (out) {
        outFilePath = path.join(path.resolve(out), path.basename(content.path))
      } else {
        outFilePath = content.path
      }

      let fileExists = fs.existsSync(outFilePath)
      if (fileExists && outFilePath.endsWith('.lu.qna.dialog')) {
        let existingCTRecognizerObject = JSON.parse(await fileHelper.getContentFromFile(outFilePath))
        let currentCTRecognizerObject = JSON.parse(content.content)
        let ctRecognizerToBeMerged = currentCTRecognizerObject.recognizers.filter((r: string) => !existingCTRecognizerObject.recognizers.includes(r))
        existingCTRecognizerObject.recognizers = existingCTRecognizerObject.recognizers.concat(ctRecognizerToBeMerged)
        content.content = JSON.stringify(existingCTRecognizerObject, null, 4)
      }

      if (force || !fileExists) {
        this.handler(`Writing to ${outFilePath}\n`)
        await fs.writeFile(outFilePath, content.content, 'utf-8')
        writeDone = true
      }
    }

    return writeDone
  }

  generateDeclarativeAssets(recognizers: Array<Recognizer>, multiRecognizers: Array<MultiLanguageRecognizer>, settings: Settings, crosstrainedRecognizers: Array<CrossTrainedRecognizer>)
    : Array<any> {
    let contents = new Array<any>()
    for (const recognizer of recognizers) {
      let content = new Content(recognizer.save(), new LUOptions(path.basename(recognizer.getDialogPath()), true, '', recognizer.getDialogPath()))
      contents.push(content)
    }

    for (const multiRecognizer of multiRecognizers) {
      const multiLangContent = new Content(multiRecognizer.save(), new LUOptions(path.basename(multiRecognizer.getDialogPath()), true, '', multiRecognizer.getDialogPath()))
      contents.push(multiLangContent)
    }

    if (settings) {
      const settingsContent = new Content(settings.save(), new LUOptions(path.basename(settings.getSettingsPath()), true, '', settings.getSettingsPath()))
      contents.push(settingsContent)
    }

    for (const crosstrainedRecognizer of crosstrainedRecognizers) {
      const crosstrainedContent = new Content(crosstrainedRecognizer.save(), new LUOptions(path.basename(crosstrainedRecognizer.getDialogPath()), true, '', crosstrainedRecognizer.getDialogPath()))
      contents.push(crosstrainedContent)
    }

    return contents
  }

  async initQnaFromContent(content: any, botName: string, suffix: string) {
    let currentQna = await qnaMakerBuilder.fromContent(content.content)
    if (!currentQna.kb.name) currentQna.kb.name = `${botName}(${suffix}).${content.language}.qna`

    return {kb: currentQna.kb, alterations: currentQna.alterations}
  }

  async updateKB(currentKB: any, qnaBuildCore: QnaBuildCore, recognizer: Recognizer, delayDuration: number) {
    await delay(delayDuration)
    const existingKB = await qnaBuildCore.exportKB(recognizer.getKBId(), 'Prod')

    // compare models
    const isKBEqual = qnaBuildCore.isKBEqual(currentKB, existingKB)
    if (!isKBEqual) {
      try {
        this.handler(`Updating to new version for kb ${currentKB.name}...\n`)
        await delay(delayDuration)
        await qnaBuildCore.replaceKB(recognizer.getKBId(), currentKB)

        this.handler(`Updating finished for kb ${currentKB.name}\n`)
      } catch (err) {
        err.text = `Updating knowledge base failed: \n${err.text}`
        throw err
      }

      return true
    } else {
      this.handler(`kb ${currentKB.name} has no changes\n`)
      return false
    }
  }

  async createKB(currentKB: any, qnaBuildCore: QnaBuildCore, recognizers: Recognizer[], delayDuration: number) {
    this.handler(`Creating qnamaker KB: ${currentKB.name}...\n`)
    await delay(delayDuration)
    const emptyKBJson = {
      name: currentKB.name,
      qnaList: [],
      urls: [],
      files: []
    }
    let response = await qnaBuildCore.importKB(emptyKBJson)
    let operationId = response.operationId
    let kbId = ''

    try {
      const opResult = await this.getKBOperationStatus(qnaBuildCore, operationId, delayDuration)
      kbId = opResult.resourceLocation.split('/')[2]
      await delay(delayDuration)
      await qnaBuildCore.replaceKB(kbId, currentKB)

      this.handler(`Creating finished for kb ${currentKB.name}\n`)
    } catch (err) {
      err.text = `Creating knowledge base failed: \n${err.text}`
      throw err
    }

    recognizers.forEach((recognizer: Recognizer) => recognizer.setKBId(kbId))

    return true
  }

  async createUrlKB(qnaBuildCore: QnaBuildCore, url: string, kbName: string) {
    const kbJson = {
      name: kbName,
      qnaList: [],
      urls: [url],
      files: []
    }

    let response = await qnaBuildCore.importKB(kbJson)
    let operationId = response.operationId
    const opResult = await this.getKBOperationStatus(qnaBuildCore, operationId, 1000)
    const kbId = opResult.resourceLocation.split('/')[2]

    return kbId
  }

  async createFileKB(qnaBuildCore: QnaBuildCore, fileName: string, fileUri: string, kbName: string) {
    let kbJson = {
      name: kbName,
      qnaList: [],
      urls: [],
      files: [{
        fileName,
        fileUri
      }]
    }

    let response = await qnaBuildCore.importKB(kbJson)
    let operationId = response.operationId
    const opResult = await this.getKBOperationStatus(qnaBuildCore, operationId, 1000)
    const kbId = opResult.resourceLocation.split('/')[2]

    return kbId
  }

  async getKBOperationStatus(qnaBuildCore: QnaBuildCore, operationId: string, delayDuration: number) {
    let opResult
    let isGetting = true
    while (isGetting) {
      await delay(delayDuration)
      opResult = await qnaBuildCore.getOperationStatus(operationId)

      if (opResult.operationState === 'Failed') {
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, JSON.stringify(opResult, null, 4)))
      }

      if (opResult.operationState === 'Succeeded') isGetting = false
    }

    return opResult
  }

  async publishKB(qnaBuildCore: QnaBuildCore, recognizer: Recognizer, kbName: string, delayDuration: number) {
    // train and publish the application
    this.handler(`Publishing kb ${kbName}...\n`)
    await delay(delayDuration)
    await qnaBuildCore.publishKB(recognizer.getKBId())
    this.handler(`Publishing finished for kb ${kbName}\n`)
  }

  async resolveMergedQnAContentIds(contents: Map<string, any>, objects: Map<string, any[]>, importResolver?: object) {
    for (const [name, content] of contents) {
      let qnaObjects = objects.get(name)
      try {
        let result = await qnaBuilderVerbose.build(qnaObjects, true, importResolver)
        let mergedContent = result.parseToQnAContent()
        content.content = mergedContent
        contents.set(name, content)
      } catch (err) {
        if (err.source) {
          err.text = `Invalid QnA file ${err.source}: ${err.text}`
        } else {
          err.text = `Invalid QnA file ${content.path}: ${err.text}`
        }
        throw (new exception(retCode.errorCode.INVALID_INPUT_FILE, err.text))
      }
    }
  }
}
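End to end, the CLI is expected to drive this class roughly as below. This is a sketch, not the actual command wiring; the key, endpoint URL, and file paths are placeholders:

const {Builder} = require('./builder')

async function run() {
    const builder = new Builder(msg => process.stdout.write(msg))
    const loaded = await builder.loadContents(
        ['./dialogs/main.en-us.qna'], 'MyBot', 'dev', 'westus', 'en-us')
    const assets = await builder.build(
        loaded.qnaContents, loaded.recognizers,
        '<subscription-key>', 'https://westus.api.cognitive.microsoft.com/qnamaker/v4.0',
        'MyBot', 'dev', 'en-us',
        loaded.multiRecognizers, loaded.settings, loaded.crosstrainedRecognizers)
    await builder.writeDialogAssets(assets, true, './generated')
}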
@@ -0,0 +1,202 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const retCode = require('./../utils/enums/CLI-errors')
const exception = require('./../utils/exception')
const {ServiceBase} = require('./serviceBase')
const NEWLINE = require('os').EOL

export class QnaBuildCore {
  private readonly service: any

  constructor(subscriptionkey: string, endpoint: string) {
    this.service = new ServiceBase(endpoint, subscriptionkey)
  }

  public async getKBList() {
    const response = await this.service.createRequest('/knowledgebases', 'GET')
    const text = await response.text()
    const kbList = JSON.parse(text)
    if (kbList.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return kbList
  }

  public async getKB(kbId: string) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}`, 'GET')
    const text = await response.text()
    const kb = JSON.parse(text)
    if (kb.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return kb
  }

  public async importKB(kbPayload: any) {
    const response = await this.service.createRequest('/knowledgebases/createasync', 'POST', kbPayload)
    const text = await response.text()
    const status = JSON.parse(text)
    if (status.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return status
  }

  public async getOperationStatus(operationId: string) {
    const response = await this.service.createRequest(`/operations/${operationId}`, 'GET')
    const text = await response.text()
    const status = JSON.parse(text)
    if (status.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return status
  }

  public async exportKB(kbId: string, environment: string) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}/${environment}/qna`, 'GET')
    const text = await response.text()
    const kb = JSON.parse(text)
    if (kb.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return kb
  }

  public async updateKB(kbId: string, replaceKb: any) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}`, 'PATCH', replaceKb)
    const text = await response.text()
    const status = JSON.parse(text)
    if (status.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return status
  }

  public async replaceKB(kbId: string, replaceKb: any) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}`, 'PUT', replaceKb)
    const text = await response.text()
    if (text) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }
  }

  public async publishKB(kbId: string) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}`, 'POST')
    const text = await response.text()
    if (text) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }
  }

  public async replaceAlt(altJson: any) {
    const response = await this.service.createRequest('/alterations', 'PUT', altJson)
    const text = await response.text()
    if (text) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }
  }

  public async getEndpointKeys() {
    const response = await this.service.createRequest('/endpointkeys', 'GET')
    const text = await response.text()
    const endpointKeys = JSON.parse(text)
    if (endpointKeys.error) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }

    return endpointKeys
  }

  public async deleteKB(kbId: string) {
    const response = await this.service.createRequest(`/knowledgebases/${kbId}`, 'DELETE')
    const text = await response.text()
    if (text) {
      throw (new exception(retCode.errorCode.LUIS_API_CALL_FAILED, text))
    }
  }

  public isKBEqual(kbA: any, kbB: any): boolean {
    const qnaListA = kbA.qnaList
    const qnaListAToCompare = qnaListA.map((qna: any) => {
      return {
        id: qna.id,
        answer: qna.answer,
        source: qna.source,
        questions: qna.questions.slice(),
        metadata: qna.metadata.slice(),
        context: qna.context
      }
    })

    const qnaDocumentsB = kbB.qnaDocuments || []
    const qnaListBToCompare = qnaDocumentsB.map((qna: any) => {
      return {
        id: qna.id,
        answer: qna.answer,
        source: qna.source,
        questions: qna.questions,
        metadata: qna.metadata,
        context: qna.context
      }
    })

    const equal = this.isQnaListEqual(qnaListAToCompare, qnaListBToCompare)

    return equal
  }

  private isQnaListEqual(qnaListA: any, qnaListB: any) {
    let kbAQnA = this.parseToQnAContent(qnaListA).toLowerCase()
    let kbBQnA = this.parseToQnAContent(qnaListB).toLowerCase()

    return kbAQnA === kbBQnA
  }

  private parseToQnAContent(qnaList: any) {
    let fileContent = ''
    qnaList.forEach((qnaItem: any) => {
      fileContent += '<a id = "0"></a>' + NEWLINE + NEWLINE
      fileContent += '> !# @qna.pair.source = ' + qnaItem.source + NEWLINE + NEWLINE
      fileContent += '## ? ' + qnaItem.questions[0] + NEWLINE
      qnaItem.questions.splice(0, 1)
      qnaItem.questions.forEach((question: any) => {
        fileContent += '- ' + question + NEWLINE
      })
      fileContent += NEWLINE
      if (qnaItem.metadata && qnaItem.metadata.length > 0) {
        fileContent += '**Filters:**' + NEWLINE
        qnaItem.metadata.sort((a: any, b: any) => (a.name > b.name) ? 1 : -1).forEach((filter: any) => {
          fileContent += '- ' + filter.name + ' = ' + filter.value + NEWLINE
        })
        fileContent += NEWLINE
      }
      fileContent += '```markdown' + NEWLINE
      fileContent += qnaItem.answer + NEWLINE
      fileContent += '```' + NEWLINE
      if (qnaItem.context && qnaItem.context.prompts && qnaItem.context.prompts.length !== 0) {
        fileContent += NEWLINE + '**Prompts:**' + NEWLINE
        qnaItem.context.prompts.forEach((prompt: any) => {
          fileContent += `- [${prompt.displayText}]`
          // See if the linked prompt is context only and if so, add the decoration.
          let promptQnA = qnaList.find((item: any) => item.id === prompt.qnaId)
          if (promptQnA) {
            fileContent += promptQnA.context.isContextOnly === true ? ' \`context-only\`' : ''
          }
          fileContent += NEWLINE
        })
      }
      fileContent += NEWLINE
    })

    return fileContent
  }
}
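Note how equality is decided: both KBs are rendered to qna-format text and compared case-insensitively, with every anchor forced to id "0" and metadata sorted by name, so id churn, case differences, and metadata ordering do not trigger spurious updates. A sketch (key and endpoint are placeholders, calling the compiled module from JavaScript):

const {QnaBuildCore} = require('./core')

const core = new QnaBuildCore('<subscription-key>', '<endpoint>')
const local = {qnaList: [{id: 1, answer: 'Hi!', source: 'custom editorial', questions: ['hello'], metadata: []}]}
const remote = {qnaDocuments: [{id: 42, answer: 'HI!', source: 'custom editorial', questions: ['hello'], metadata: []}]}
core.isKBEqual(local, remote) // true: ids are masked and the text is lower-cased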
@@ -0,0 +1,33 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

export class CrossTrainedRecognizer {
  public recognizers: any
  private readonly dialogPath: string
  private readonly $schema: string | undefined

  constructor(dialogPath: string, recognizers: any, schema?: string) {
    this.dialogPath = dialogPath
    this.recognizers = recognizers
    this.$schema = schema
  }

  save(): string {
    let output: any = {
      $kind: 'Microsoft.CrossTrainedRecognizerSet',
      recognizers: this.recognizers
    }

    if (this.$schema) {
      output = {$schema: this.$schema, ...output}
    }

    return JSON.stringify(output, null, 4)
  }

  getDialogPath(): string {
    return this.dialogPath
  }
}
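save() emits the declarative `.lu.qna.dialog` payload; for example (a sketch, with the import style assuming the compiled module):

const {CrossTrainedRecognizer} = require('./cross-trained-recognizer')

const ct = new CrossTrainedRecognizer('./main.lu.qna.dialog', ['main.lu', 'main.qna'])
process.stdout.write(ct.save())
// {
//     "$kind": "Microsoft.CrossTrainedRecognizerSet",
//     "recognizers": [ ... ]   (the two entries, pretty-printed with 4-space indent)
// }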
@@ -0,0 +1,38 @@
/*!
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

const path = require('path')

export class MultiLanguageRecognizer {
  public recognizers: any
  private readonly id: string
  private readonly dialogPath: string
  private readonly $schema: string | undefined

  constructor(dialogPath: string, recognizers: any, schema?: string) {
    this.id = `QnA_${path.basename(dialogPath).split('.')[0]}`
    this.dialogPath = dialogPath
    this.recognizers = recognizers
    this.$schema = schema
  }

  save(): string {
    let output: any = {
      $kind: 'Microsoft.MultiLanguageRecognizer',
      id: this.id,
      recognizers: this.recognizers
    }

    if (this.$schema) {
      output = {$schema: this.$schema, ...output}
    }

    return JSON.stringify(output, null, 4)
  }

  getDialogPath(): string {
    return this.dialogPath
  }
}
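Same pattern as the cross-trained recognizer above, plus a derived id; a sketch:

const {MultiLanguageRecognizer} = require('./multi-language-recognizer')

const ml = new MultiLanguageRecognizer('./main.qna.dialog', {'en-us': 'main.en-us.qna', '': 'main.en-us.qna'})
JSON.parse(ml.save()).id // 'QnA_main', from the first dot-segment of the dialog file name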